Example #1
    def getWaveformNSCL(self, seedname, starttime, duration):
        """
        Gets waveforms for channels matching a regular expression, from a
        start time for a duration in seconds. The regular expression must
        represent all characters of the 12-character NNSSSSSCCCLL pattern,
        e.g. "US.....[BSHE]HZ.." is valid, but "US.....[BSHE]H" is not.
        Complex regular expressions are permitted, e.g.
        "US.....BHZ..|CU.....[BH]HZ.."

        .. rubric:: Notes

        For detailed information regarding the usage of regular expressions
        in the query, see also the documentation for CWBQuery ("CWBQuery.doc")
        available at ftp://hazards.cr.usgs.gov/CWBQuery/.
        Using ".*" regular expression might or might not work. If the 12
        character seed name regular expression is less than 12 characters it
        might get padded with spaces on the server side.

        :type seedname: str
        :param seedname: The 12 character seedname or 12 character regexp
            matching channels
        :type starttime: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param starttime: The starting date/time to get
        :type duration: float
        :param duration: The duration in seconds to get
        :rtype: :class:`~obspy.core.stream.Stream`
        :returns: Stream object with requested data

        .. rubric:: Example

        >>> from obspy.neic import Client
        >>> from obspy import UTCDateTime
        >>> client = Client()
        >>> t = UTCDateTime() - 5 * 3600  # 5 hours before now
        >>> st = client.getWaveformNSCL("IUANMO BH.00", t, 10)
        >>> print(st)  # doctest: +ELLIPSIS
        3 Trace(s) in Stream:
        IU.ANMO.00.BH... | 20.0 Hz, 201 samples
        IU.ANMO.00.BH... | 20.0 Hz, 201 samples
        IU.ANMO.00.BH... | 20.0 Hz, 201 samples
        """
        start = str(UTCDateTime(starttime)).replace("T", " ").replace("Z", "")
        line = "'-dbg' '-s' '%s' '-b' '%s' '-d' '%s'\t" % \
            (seedname, start, duration)
        if self.debug:
            print(ascdate() + " " + asctime() + " line=" + line)
        success = False
        while not success:
            try:
                s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                with NamedTemporaryFile() as tf:
                    if self.debug:
                        print(ascdate(), asctime(), "connecting temp file",
                              tf.name)
                    s.connect((self.host, self.port))
                    s.setblocking(0)
                    s.send(line.encode('ascii', 'strict'))
                    if self.debug:
                        print(ascdate(), asctime(), "Connected - start reads")
                    slept = 0
                    maxslept = self.timeout / 0.05
                    totlen = 0
                    while True:
                        try:
                            data = s.recv(102400)
                            if self.debug:
                                print(ascdate(), asctime(), "read len",
                                      str(len(data)), " total", str(totlen))
                            if data.find(b"EOR") >= 0:
                                if self.debug:
                                    print(ascdate(), asctime(), b"<EOR> seen")
                                tf.write(data[0:data.find(b"<EOR>")])
                                totlen += len(data[0:data.find(b"<EOR>")])
                                tf.seek(0)
                                try:
                                    st = read(tf.name, 'MSEED')
                                except Exception as e:
                                    st = Stream()
                                st.trim(starttime, starttime + duration)
                                s.close()
                                success = True
                                break
                            else:
                                totlen += len(data)
                                tf.write(data)
                                slept = 0
                        except socket.error as e:
                            if slept > maxslept:
                                print(ascdate(), asctime(),
                                      "Timeout on connection",
                                      "- try to reconnect")
                                slept = 0
                                s.close()
                                # leave the read loop so the outer loop reconnects
                                break
                            sleep(0.05)
                            slept += 1
            except socket.error as e:
                print(traceback.format_exc())
                print("CWB QueryServer at " + self.host + "/" + str(self.port))
                raise
            except Exception as e:
                print(traceback.format_exc())
                print("**** exception found=" + str(e))
                raise
        if self.debug:
            print(ascdate() + " " + asctime() + " success?  len=" +
                  str(totlen))
        st.merge(-1)
        return st
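
A minimal usage sketch of the method above, repeating the docstring's channel pattern; the default Client() settings are assumed, and whether any data comes back depends on the CWB QueryServer being reachable:

from obspy import UTCDateTime
from obspy.neic import Client

client = Client()                              # default CWB QueryServer settings (assumed reachable)
t = UTCDateTime() - 5 * 3600                   # five hours before now
# the regular expression must cover all 12 NNSSSSSCCCLL characters
st = client.getWaveformNSCL("US.....[BSHE]HZ..", t, 60)
print(st)
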
Example #2
def plot_lag_all(home,project_name,cata_name,sta_name,filter_slope,ref_OT="2018-05-04T22:32:54.650",coast_path=''):
    import numpy as np
    import pandas as pd
    import matplotlib
    matplotlib.use('pdf') #use a non-interactive backend
    import matplotlib.pyplot as plt
    import seaborn as sns
    from obspy import UTCDateTime

    from repeq import data_proc
    from repeq.EQreloc import get_lonlat
    '''
        read home/project_name/output/Template_match/Measure_lag/measure_lag_all.npy and plot the slope of the shift measurements
        cata_name: catalog name
        sta_name: station table created by data_proc.make_sta_table
        filter_slope: parameters for measuring the slope
        filter_slope = {
        'diff_t':60,        #minimum dt between template and detected_OT (note the definition differs from the filter_detc used by data_proc.bulk_cut_dailydata)
        'aligned_CC':0.7,   #minimum CC at zero lag for a trusted alignment
        'measured_CC':0.5,  #minimum CC for an individual shift measurement
        'max_shift':0.5,    #drop shifts larger than this number (very large shifts are due to cycle slips)
        'min_length':0.7,   #fraction of the available time series that must pass the above criteria (0~1)
        'cal_range':[5,10], #time range for the slope calculation
        }
    '''
    sns.set()
    sns.set_palette('husl',n_colors=10)

    # load coast data if given
    if coast_path:
        coast=np.genfromtxt(coast_path)

    # load catalog
    df = data_proc.cat2pd(home+'/'+project_name+'/catalog/'+cata_name)

    # load station table in home/project_name/stations/stations.txt
    sta_table = pd.read_table(home+'/'+project_name+'/stations/'+sta_name,header=None,names=['stlon','stlat','stelev','stname'],sep=' ')

    #load all lag measurements (this is a huge file, make sure memory fit)
    lag_all = np.load(home+'/'+project_name+'/output/Template_match/Measure_lag/'+'measure_lag_all.npy',allow_pickle=True)
    lag_all = lag_all.item()

    print('*****Apply depth filter: skip template events shallower than 2 km*****')

    sav_slope = {} #with sta as key
    for ik in lag_all.keys():
        temp_OT = lag_all[ik]['template_OT']
        #find the corresponding eqinfo
        tmp_df = df[(df.Date==temp_OT.split('T')[0]) & (df.Time==temp_OT.split('T')[1] )  ]
        if tmp_df.iloc[0].Depth < 2.0:
            continue


        #loop all the detections
        for detc_OT in lag_all[ik]['detc_OT'].keys():
            if np.abs(UTCDateTime(temp_OT)-UTCDateTime(detc_OT))<filter_slope['diff_t']:
                continue #skip detections too close to the template (basically the template itself)

            #loop every stations
            for sta in lag_all[ik]['detc_OT'][detc_OT].keys():
                #in each lag_all[ik]['detc_OT'][detc_OT][sta] there are "time","shift","CCC" keys
                time = lag_all[ik]['detc_OT'][detc_OT][sta]['time']
                shift = lag_all[ik]['detc_OT'][detc_OT][sta]['shift']
                CCC = lag_all[ik]['detc_OT'][detc_OT][sta]['CCC']

                #find the alignedCC (time closest to zero)
                zeroidx = np.where(np.abs(time) == np.min(np.abs(time)))[0][0]
                if CCC[zeroidx]<filter_slope['aligned_CC']:
                    continue #alignment is not robust
                        
                #take the time and shift and fit a slope
                idx = np.where((CCC>=filter_slope['measured_CC']) & (shift<filter_slope['max_shift']) & (time>=filter_slope['cal_range'][0]) & (time<=filter_slope['cal_range'][1]) )[0] #the shift cannot be too large, otherwise it is a cycle slip
                idx_t = np.where((time>=filter_slope['cal_range'][0]) & (time<=filter_slope['cal_range'][1]))[0]
                #idx = np.where((CCC>=filter_slope['measured_CC']) & (shift<filter_slope['max_shift']) & (time>=0) )[0] #shift cannt be too large otherwise is cycle slip
                #if len(idx) >= (len(shift)*filter_slope['min_length']):
                if len(idx) >= (len(idx_t)*filter_slope['min_length']):
                    #plt.plot(time[idx],shift[idx])
                    #plt.show()
                    #enough data (min_length fraction) pass the thresholds, so calculate the slope
                    M = data_proc.cal_slope(time[idx],shift[idx])
                    #sav_slope.append(M[1]) #M[0] is intercept, M[1] is slope
                    G = np.hstack([np.ones([len(idx),1]),time[idx].reshape(-1,1)])
                    yhat = np.dot(G,M.reshape(-1,1))
                    fit_std = np.std(yhat-shift[idx]) #standard deviation of misfit
                    #sav_std.append(fit_std)
                    #get the reference time with respect to template_OT
                    ref_tempT = (UTCDateTime(detc_OT)-UTCDateTime(temp_OT))/86400.0 #relative days from template
                    #sav_reftime.append(ref_tempT)
                    #create a new sta key if it is not there
                    if not (sta in sav_slope):
                        sav_slope[sta] = {temp_OT:{'slope':[],'std':[],'ref_time':[],'ID':ik}}
                    #template_OT as new key
                    if not (lag_all[ik]['template_OT'] in sav_slope[sta]):
                        sav_slope[sta][temp_OT] = {'slope':[],'std':[],'ref_time':[],'ID':ik}
                    #appending data
                    sav_slope[sta][temp_OT]['slope'] = np.hstack([sav_slope[sta][temp_OT]['slope'],M[1]]) #appending data as array
                    sav_slope[sta][temp_OT]['std'] = np.hstack([sav_slope[sta][temp_OT]['std'],fit_std]) #
                    sav_slope[sta][temp_OT]['ref_time'] = np.hstack([sav_slope[sta][temp_OT]['ref_time'],ref_tempT])


    ref_OT = UTCDateTime(ref_OT)
    #=========plot station result============
    for sta in sav_slope.keys():
        plt.figure(figsize=(8.5,4.5))
        plt.subplot(1,2,1)
        #loop templates
        n_meas = 0 #n-measurements
        sav_tmplon = [] #save template lon
        sav_tmplat = [] #template lat
        sav_h = []
        for temp in sav_slope[sta].keys():
            #for each template, all the measurements at this station
            time = sav_slope[sta][temp]['ref_time'] #0 is the template time
            slope = sav_slope[sta][temp]['slope']
            stdn = sav_slope[sta][temp]['std']
            #add the template itself to the data
            time = np.hstack([time,0])
            slope = np.hstack([slope,0])
            stdn = np.hstack([stdn,0])
            #sort
            sor_idx = np.argsort(time)
            time = time[sor_idx]
            slope = slope[sor_idx]
            stdn = stdn[sor_idx]
            dt_main = (UTCDateTime(temp)-ref_OT)/86400.0 #set t at ref_OT = 0
            time += dt_main
            #check if slope measurement across the mainshock
            if not ((time.min()<0) & (time.max()>0)):
                continue
            #do not plot short sequences
            if len(time)< 5:
                continue
            #if (time.max()<0):
            #    continue
            #plt.errorbar(time,slope+n_meas,stdn)
            scale_slope = np.std(slope)
            #h = plt.plot(time,slope+n_meas*0.01,'.-') #the old scaling
            h = plt.plot(time,slope/scale_slope+n_meas*3,'.-')  #normalize by their std
            sav_h.append(h[0])
            '''
            #old scaling
            plt.plot(dt_main,0+n_meas*0.01,'v',markerfacecolor=[1,0,0],markeredgecolor=[0,0,0]) #template triangle mark
            plt.plot([-10,10],[n_meas*0.01,n_meas*0.01],'k--',linewidth=0.5)
            '''
            plt.plot(dt_main,0+n_meas*3,'v',markerfacecolor=[1,0,0],markeredgecolor=[0,0,0]) #template triangle mark
            plt.plot([-10,10],[n_meas*3,n_meas*3],'k--',linewidth=0.5)
            #save template information (loc)
            sav_tmplon.append(df.iloc[ int(sav_slope[sta][temp]['ID']) ].Lon)
            sav_tmplat.append(df.iloc[ int(sav_slope[sta][temp]['ID']) ].Lat)
            n_meas += 1
        if n_meas == 0:
            plt.close()
            continue
        #plt.plot([0,0],[-0.01,n_meas*0.01],'r',linewidth=0.5)
        plt.plot([0,0],[-3,n_meas*3],'r',linewidth=0.5)
        plt.xlim([-5,5])
        #plt.ylim([-0.01,n_meas*0.01])
        plt.ylim([-3,n_meas*3])
        plt.yticks([],[])
        plt.xlabel('Day relative to mainshock',fontsize=15,labelpad=0)
        plt.title(sta,fontsize=15)
        plt.grid(False)
        #another subplot plot map
        plt.subplot(1,2,2)
        for itmp in range(len(sav_tmplon)):
            plt.plot(sav_tmplon[itmp],sav_tmplat[itmp],'o',color=sav_h[itmp].get_color(),markeredgecolor=[0,0,0],mew=0.8,alpha=0.9)
        if coast_path:
            plt.plot(coast[:,0],coast[:,1],'k-')
        #get station lon,lat
        stlon,stlat = get_lonlat(sta_table,[sta])
        plt.plot(stlon,stlat,'^',markersize=10,color=[0,1,0],markeredgecolor=[0,0,1],mew=1)
        print('***manually plotting mainshock location and setting xlim/ylim')
        plt.plot(-154.9996667,19.3181667,'*',markerfacecolor=[1,0,0],markersize=14,markeredgecolor=[0,0,0],mew=1,alpha=0.9)
        plt.xlim([-155.85,-154.74])
        plt.ylim([18.86,19.88])
        plt.xticks(rotation=30,fontsize=10)
        plt.savefig(home+'/'+project_name+'/output/Template_match/Figs/'+'slopeSummary_%s.png'%(sta))
        plt.close()
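
A usage sketch for plot_lag_all; the paths and file names below are placeholders, and the filter_slope keys follow the docstring above:

filter_slope = {
    'diff_t': 60,         # minimum dt (s) between template and detected OT
    'aligned_CC': 0.7,    # minimum CC at zero lag for a trusted alignment
    'measured_CC': 0.5,   # minimum CC for an individual shift measurement
    'max_shift': 0.5,     # drop shifts larger than this (cycle slips)
    'min_length': 0.7,    # fraction of the window that must survive the filters
    'cal_range': [5, 10], # time range (s) used for the slope fit
}
plot_lag_all(home='/home/user', project_name='Hawaii',          # placeholder paths
             cata_name='catalog.dat', sta_name='stations.txt',  # placeholder file names
             filter_slope=filter_slope,
             ref_OT="2018-05-04T22:32:54.650",                  # default reference origin time
             coast_path='')                                     # optional two-column lon/lat coastline file
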
Example #3
def read_data(archive, arc_type, day, stachans):
    """
    Function to read the appropriate data from your archive for your selected \
    day.

    :type archive: str
    :param archive: The archive source - if arc_type is seishub, this should \
        be a url, if the arc_type is FDSN then this can be either a url or a \
        known obspy client.
    :type arc_type: str
    :param arc_type: The type of archive, can be: seishub, FDSN, day_vols
    :type day: datetime.date
    :param day: Date to retrieve data for
    :type stachans: list of tuple
    :param stachans: Stations and channels to try and get, will not fail if \
        stations are not available, but will warn.

    :returns: obspy.Stream

    .. note:: A note on arc_types: if arc_type is day_vols, then this will \
        look for directories labelled in the IRIS DMC convention of \
        Yyyyy/Rjjj.01/... where yyyy is the year and jjj is the julian day. \
        Data within these directories should be stored as day-long, \
        single-channel files.  This is not implemented in the fastest way \
        possible, to allow for a more general situation.  If you require more \
        speed you will need to re-write this.
    """
    import obspy
    from obspy.clients.fdsn.header import FDSNException
    if arc_type.lower() == 'seishub':
        if int(obspy.__version__.split('.')[0]) >= 1:
            from obspy.clients.seishub import Client
        else:
            from obspy.seishub import Client
    else:
        if int(obspy.__version__.split('.')[0]) >= 1:
            from obspy.clients.fdsn import Client
        else:
            from obspy.fdsn import Client
    from obspy import read, UTCDateTime
    import os
    import warnings

    st = []
    available_stations = _check_available_data(archive, arc_type, day)
    for station in stachans:
        if len(station[1]) == 2:
            # Cope with two char channel naming in seisan
            station_map = (station[0], station[1][0] + '*' + station[1][1])
            available_stations_map = [(sta[0], sta[1][0] + '*' + sta[1][-1])
                                      for sta in available_stations]
        else:
            station_map = station
            available_stations_map = available_stations
        if station_map not in available_stations_map:
            msg = ' '.join([
                station[0], station_map[1], 'is not available for',
                day.strftime('%d/%m/%Y')
            ])
            warnings.warn(msg)
            continue
        if arc_type.lower() in ['seishub', 'fdsn']:
            client = Client(archive)
            try:
                st += client.get_waveforms(network='*',
                                           station=station_map[0],
                                           location='*',
                                           channel=station_map[1],
                                           starttime=UTCDateTime(day),
                                           endtime=UTCDateTime(day) + 86400)
            except FDSNException:
                warnings.warn('No data on server despite station being ' +
                              'available...')
                continue
        elif arc_type.lower() == 'day_vols':
            wavfiles = _get_station_file(
                os.path.join(archive, day.strftime('Y%Y' + os.sep + 'R%j.01')),
                station_map[0], station_map[1])
            for wavfile in wavfiles:
                st += read(wavfile)
    st = obspy.Stream(st)
    return st
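
A usage sketch for read_data, assuming the surrounding module (including its _check_available_data helper) is importable; the client key, stations, and date are placeholders:

import datetime

stachans = [('FOZ', 'HHZ'), ('WVZ', 'HHZ')]       # (station, channel) tuples, placeholders
day = datetime.date(2015, 1, 1)
st = read_data('GEONET', 'FDSN', day, stachans)   # archive may be a known ObsPy FDSN client key or a URL
# for day-volume archives laid out as Yyyyy/Rjjj.01/... use:
# st = read_data('/path/to/archive', 'day_vols', day, stachans)
print(st)
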
Example #4
    def get_SNR(self, t_prior=60, source_durations=1.5):
        '''
        Get SNR considering t_prior seconds before the P-wave and a
        total waveform of source_durations length
        '''

        from numpy import zeros, log10
        from obspy import read
        from datetime import timedelta
        from obspy import UTCDateTime

        self.SNR_north = zeros(self.Nsta)
        self.SNR_east = zeros(self.Nsta)
        self.SNR_up = zeros(self.Nsta)

        hypo_time = UTCDateTime(self.hypo_time)

        for k in range(self.Nsta):
            #Are the waveform slices empty?
            empty = False

            path = self.path_to_data

            ptime = self.ptime[k]

            tp = hypo_time + timedelta(seconds=ptime)
            t0 = tp - timedelta(seconds=t_prior)
            tfinal = tp + timedelta(seconds=source_durations * self.duration)

            #read waveforms
            try:
                sta = str(self.station_names[k]).rjust(4, '0')
                n = read(path + sta + '.LXN.sac')
                e = read(path + sta + '.LXE.sac')
                z = read(path + sta + '.LXZ.sac')
            except Exception:
                print('ERROR: ' + self.station_names[k] + '.LXN.sac not found')
                return

            #Get pre-event portions (from t0 to tp)
            n_pre = n.copy()
            e_pre = e.copy()
            z_pre = z.copy()

            n_pre[0].trim(starttime=t0, endtime=tp)
            e_pre[0].trim(starttime=t0, endtime=tp)
            z_pre[0].trim(starttime=t0, endtime=tp)

            if n_pre[0].stats.npts == 0 or e_pre[0].stats.npts == 0 or z_pre[
                    0].stats.npts == 0:
                empty = True
            else:
                mean_n = n_pre[0].data.mean()
                mean_e = e_pre[0].data.mean()
                mean_z = z_pre[0].data.mean()

                n_pre[0].data = n_pre[0].data - mean_n
                e_pre[0].data = e_pre[0].data - mean_e
                z_pre[0].data = z_pre[0].data - mean_z

                #get power
                Pn_pre = sum(n_pre[0].data**2) / n_pre[0].stats.npts
                Pe_pre = sum(e_pre[0].data**2) / e_pre[0].stats.npts
                Pz_pre = sum(z_pre[0].data**2) / z_pre[0].stats.npts

            #Trim post P-arrival signal
            n[0].trim(starttime=tp, endtime=tfinal)
            e[0].trim(starttime=tp, endtime=tfinal)
            z[0].trim(starttime=tp, endtime=tfinal)

            if n[0].stats.npts == 0 or e[0].stats.npts == 0 or z[
                    0].stats.npts == 0:
                empty = True
            elif not empty:  #only demean when the pre-event window was usable
                n[0].data = n[0].data - mean_n
                e[0].data = e[0].data - mean_e
                z[0].data = z[0].data - mean_z

                #get power
                Pn = sum(n[0].data**2) / n[0].stats.npts
                Pe = sum(e[0].data**2) / e[0].stats.npts
                Pz = sum(z[0].data**2) / z[0].stats.npts

            #And finally SNR
            if not empty:
                self.SNR_north[k] = Pn / Pn_pre
                self.SNR_east[k] = Pe / Pe_pre
                self.SNR_up[k] = Pz / Pz_pre
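
The SNR above is the ratio of mean signal power after the P arrival to mean pre-event power, after removing the pre-event mean. A standalone sketch with synthetic numpy arrays:

import numpy as np

rng = np.random.default_rng(0)
pre = rng.normal(0.0, 1.0, 600)                     # pre-event noise window
post = rng.normal(0.0, 1.0, 1500) + 5.0             # post-P window with signal
mean_pre = pre.mean()                               # pre-event mean, removed from both windows
P_pre = np.sum((pre - mean_pre) ** 2) / pre.size    # mean pre-event power
P_post = np.sum((post - mean_pre) ** 2) / post.size # mean post-P power
print(P_post / P_pre)                               # SNR as a power ratio
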
Example #5
def plot_detc_tcs(daily_cut,template,filter_detc,outname):
    '''
        daily_cut: daily data cut by data_proc.cut_dailydata
        template: template .ms data in waveforms_template
        filter_detc: filter dictionary applied before plotting
        outname: output file name prefix
    '''
    import obspy
    import numpy as np
    import matplotlib
    matplotlib.use('pdf') #instead using interactive backend
    import matplotlib.pyplot as plt
    from obspy import UTCDateTime
    from repeq import data_proc
    if type(template)==str:
        temp = obspy.read(template)
    else:
        temp = template
    if type(daily_cut)==str:
        daily_cut = np.load(daily_cut,allow_pickle=True)
        daily_cut = daily_cut.item()
    #apply filter
    daily_cut = data_proc.clean_data_cut(daily_cut,filter_detc)
    if len(daily_cut['detc_tcs'].keys())==0:
        return 1 #nothing left, just return
    OT_temp = UTCDateTime(daily_cut['OT_template']) #origin time for template
    for ik in daily_cut['detc_tcs'].keys():
        D = daily_cut['detc_tcs'][ik]
        phase = daily_cut['phase'][ik] # assume D and phase have the same order
        OT_D = UTCDateTime(ik) #origin time of Detection (cut from daily data)
        XLIM=[]
        #create figure based on how many traces
        print('Ntraces=',len(D))
        #if 0<len(D)<=20:
        #    fig = plt.figure(figsize=(8.5,5.5))
        #elif 20<len(D)<=30:
        #    fig = plt.figure(figsize=(8.5,6.5))
        #elif 30<len(D):
        fig = plt.figure(figsize=(8.5,8.5)) #all with the same size
        for ista in range(len(D)):
            net = D[ista].stats.network
            sta = D[ista].stats.station
            channel = D[ista].stats.channel
            location = D[ista].stats.location
            PS = phase[ista] #'P' or 'S'
            selected_temp = temp.select(network=net,station=sta,channel=channel,location=location)
            selected_temp = selected_temp.copy()
            #in most cases this returns only one trace, but a station can have both P and S picks
            if len(selected_temp)!=1:
                t1 = selected_temp[0].stats.starttime
                t2 = selected_temp[1].stats.starttime
                print('phase=',PS)
                if t2-t1>0:
                    if PS=='P':
                        selected_temp = obspy.Stream(selected_temp[0])
                        print('return first one')
                    elif PS=='S':
                        selected_temp = obspy.Stream(selected_temp[1])
                        print('return second one')
                else:
                    if PS=='P':
                        selected_temp = obspy.Stream(selected_temp[1])
                    elif PS=='S':
                        selected_temp = obspy.Stream(selected_temp[0])
                print('multiple traces selected, returning data based on the basic P/S wave assumption') #have to check this!
                #continue #!!!!!!!! deal with this later!!!!!!!!!!
            #dealing with time
            T_D = D[ista].times()
            T_temp = selected_temp[0].times() #length should only be 1, unless P/S in same data
            #time relative to the origin, so that the origin is at zero
            dt_D = D[ista].stats.starttime-OT_D
            T_D = T_D+dt_D
            dt_temp = selected_temp[0].stats.starttime-OT_temp
            T_temp = T_temp+dt_temp
            #normalize data
            data_D = D[ista].data/np.max(D[ista].data)
            #data_D = D[ista].data/np.max(selected_temp[0].data) #normalize the data based on template amplitude, not daily data amplitude
            data_temp = selected_temp[0].data/np.max(selected_temp[0].data)
            #data_temp = selected_temp[0].data/np.max(D[ista].data)
            #plot both tcs
            plt.plot(T_D,data_D+ista*1.5,'k')
            if PS=='P':
                plt.plot(T_temp,data_temp+ista*1.5,'r')
            else:
                plt.plot(T_temp,data_temp+ista*1.5,'b')
            #get xlim bound
            if ista==0:
                XLIM.append(T_temp[0]-1)
        XLIM.append(T_temp[-1]+1)
        YLIM = plt.ylim()
        YLIM = [-1,ista*1.5+1]
        YLIM = [YLIM[0],YLIM[1]+0.08*(YLIM[1]-YLIM[0]) ]
        #add text
        props = dict(boxstyle='round', facecolor='white', alpha=0.5)
        text_xloc = (XLIM[1]-XLIM[0])*0.04+XLIM[0]
        text_yloc = (YLIM[1]-YLIM[0])*0.86+YLIM[0]
        text_yloc_temp = (YLIM[1]-YLIM[0])*0.94+YLIM[0]
        plt.text(text_xloc,text_yloc,ik,fontsize=12,bbox=props)
        plt.text(text_xloc,text_yloc_temp,OT_temp.strftime('%Y-%m-%dT%H:%M:%S.%f')[:-4],fontsize=12,color=[1,0,0],bbox=props)
        plt.xlabel('Origin time (s)',fontsize=15,labelpad=0)
        plt.xticks(fontsize=12)
        plt.yticks([],[])
        #add title
        plt.title('CC=%.2f'%(daily_cut['meanCC'][ik]))
        ax1 = plt.gca()
        ax1.tick_params(pad=1) #make axis closer
        plt.xlim(XLIM)
        plt.ylim(YLIM)
        #savName = template.split('_')[-1].split('.')[0] #this is the template ID
        if outname:
            print('save fig:',outname+ik+'.png')
            plt.savefig(outname+ik.replace(':','')+'.png',dpi=300)
        #plt.show()
        plt.close()
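
A usage sketch for plot_detc_tcs; the paths are placeholders and the filter_detc keys below are hypothetical (see data_proc.clean_data_cut for the real ones):

filter_detc = {'min_stan': 5, 'min_CC': 0.3}   # hypothetical keys, not documented here
plot_detc_tcs(daily_cut='output/Template_match/Data_detection_cut/Detected_data_00001.npy',
              template='waveforms_template/template_00001.ms',
              filter_detc=filter_detc,
              outname='output/Template_match/Figs/detc_')   # used as a file-name prefix
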
Example #6
def main():

    # Run Input Parser
    args = arguments.get_correct_arguments()

    # Load Database
    db = stdb.io.load_db(fname=args.indb)

    # Construct station key loop
    allkeys = sorted(db.keys())

    # Extract key subset
    if len(args.stkeys) > 0:
        stkeys = []
        for skey in args.stkeys:
            stkeys.extend([s for s in allkeys if skey in s])
    else:
        stkeys = sorted(db.keys())

    # Loop over station keys
    for stkey in list(stkeys):

        # Extract station information from dictionary
        sta = db[stkey]

        # Path where transfer functions will be located
        transpath = Path('TF_STA') / stkey
        if not transpath.is_dir():
            raise (Exception("Path to " + str(transpath) +
                             " doesn`t exist - aborting"))

        # Path where event data are located
        eventpath = Path('EVENTS') / stkey
        if not eventpath.is_dir():
            raise (Exception("Path to " + str(eventpath) +
                             " doesn`t exist - aborting"))

        # Path where plots will be saved
        if args.saveplot:
            plotpath = eventpath / 'PLOTS'
            if not plotpath.is_dir():
                plotpath.mkdir(parents=True)
        else:
            plotpath = False

        # Get catalogue search start time
        if args.startT is None:
            tstart = sta.startdate
        else:
            tstart = args.startT

        # Get catalogue search end time
        if args.endT is None:
            tend = sta.enddate
        else:
            tend = args.endT

        if tstart > sta.enddate or tend < sta.startdate:
            continue

        # Temporary print locations
        tlocs = sta.location
        if len(tlocs) == 0:
            tlocs = ['']
        for il in range(0, len(tlocs)):
            if len(tlocs[il]) == 0:
                tlocs[il] = "--"
        sta.location = tlocs

        # Update Display
        print(" ")
        print(" ")
        print("|===============================================|")
        print("|===============================================|")
        print("|                   {0:>8s}                    |".format(
            sta.station))
        print("|===============================================|")
        print("|===============================================|")
        print("|  Station: {0:>2s}.{1:5s}                            |".format(
            sta.network, sta.station))
        print("|      Channel: {0:2s}; Locations: {1:15s}  |".format(
            sta.channel, ",".join(tlocs)))
        print("|      Lon: {0:7.2f}; Lat: {1:6.2f}                |".format(
            sta.longitude, sta.latitude))
        print("|      Start time: {0:19s}          |".format(
            sta.startdate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|      End time:   {0:19s}          |".format(
            sta.enddate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|-----------------------------------------------|")

        # Find all files in directories
        p = eventpath.glob('*.*')
        event_files = [x for x in p if x.is_file()]
        p = transpath.glob('*.*')
        trans_files = [x for x in p if x.is_file()]

        # Check if folders contain anything
        if not event_files:
            raise (Exception("There are no events in folder " +
                             str(eventpath)))

        if not trans_files:
            raise (Exception("There are no transfer functions in folder " +
                             str(transpath)))

        # Cycle through available files
        for eventfile in event_files:

            # Skip hidden files and folders
            if eventfile.name[0] == '.':
                continue

            evprefix = eventfile.name.split('.')
            evstamp = evprefix[0] + '.' + evprefix[1] + '.'

            evDateTime = UTCDateTime(evprefix[0] + '-' + evprefix[1])
            if evDateTime >= tstart and evDateTime <= tend:

                # Load event file
                try:
                    file = open(eventfile, 'rb')
                    eventstream = pickle.load(file)
                    file.close()
                except:
                    print("File " + str(eventfile) +
                          " exists but cannot be loaded")
                    continue

            else:
                continue

            if args.fig_event_raw:
                fname = stkey + '.' + evstamp + 'raw'
                plot = plotting.fig_event_raw(eventstream,
                                              fmin=args.fmin,
                                              fmax=args.fmax)

                if plotpath:
                    plot.savefig(plotpath / (fname + '.' + args.form),
                                 dpi=300,
                                 bbox_inches='tight',
                                 format=args.form)
                else:
                    plot.show()

            # Cycle through corresponding TF files
            for transfile in trans_files:

                # Skip hidden files and folders
                if transfile.name[0] == '.':
                    continue

                tfprefix = transfile.name.split('transfunc')[0]

                # This case refers to the "cleaned" spectral averages
                if len(tfprefix) > 9:
                    if not args.skip_clean:
                        yr1 = tfprefix.split('-')[0].split('.')[0]
                        jd1 = tfprefix.split('-')[0].split('.')[1]
                        yr2 = tfprefix.split('-')[1].split('.')[0]
                        jd2 = tfprefix.split('-')[1].split('.')[1]
                        date1 = UTCDateTime(yr1 + '-' + jd1)
                        date2 = UTCDateTime(yr2 + '-' + jd2)
                        dateev = UTCDateTime(evprefix[0] + '-' + evprefix[1])
                        if dateev >= date1 and dateev <= date2:
                            print(
                                str(transfile) +
                                " file found - applying transfer functions")

                            try:
                                file = open(transfile, 'rb')
                                tfaverage = pickle.load(file)
                                file.close()
                            except:
                                print("File " + str(transfile) +
                                      " exists but cannot be loaded")
                                continue

                            # List of possible transfer functions for station
                            # average files
                            eventstream.correct_data(tfaverage)

                            correct = eventstream.correct
                            if args.fig_plot_corrected:
                                fname = stkey + '.' + evstamp + 'sta_corrected'
                                plot = plotting.fig_event_corrected(
                                    eventstream, tfaverage.tf_list)
                                # Save or show figure
                                if plotpath:
                                    plot.savefig(plotpath /
                                                 (fname + '.' + args.form),
                                                 dpi=300,
                                                 bbox_inches='tight',
                                                 format=args.form)
                                else:
                                    plot.show()

                # This case refers to the "daily" spectral averages
                else:
                    if not args.skip_daily:
                        if tfprefix == evstamp:
                            print(
                                str(transfile) +
                                " file found - applying transfer functions")

                            try:
                                file = open(transfile, 'rb')
                                tfaverage = pickle.load(file)
                                file.close()
                            except:
                                print("File " + str(transfile) +
                                      " exists but cannot be loaded")
                                continue

                            # List of possible transfer functions for station
                            # average files
                            eventstream.correct_data(tfaverage)

                            correct = eventstream.correct
                            if args.fig_plot_corrected:
                                fname = stkey + '.' + evstamp + 'day_corrected'
                                plot = plotting.fig_event_corrected(
                                    eventstream, tfaverage.tf_list)
                                # Save or show figure
                                if plotpath:
                                    plot.savefig(plotpath /
                                                 (fname + '.' + args.form),
                                                 dpi=300,
                                                 bbox_inches='tight',
                                                 format=args.form)
                                else:
                                    plot.show()
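
The loop above matches event files named yyyy.jjj.* against transfer-function files whose "cleaned" prefix spans yyyy.jjj-yyyy.jjj, comparing ISO ordinal dates with UTCDateTime. A small sketch of that check with hypothetical file names:

from obspy import UTCDateTime

evprefix = "2012.034.120000.SAC".split('.')            # hypothetical event file name
tfprefix = "2012.001-2012.060"                         # hypothetical "cleaned" TF prefix
dateev = UTCDateTime(evprefix[0] + '-' + evprefix[1])  # ISO ordinal date: 2012, day 034
date1 = UTCDateTime(tfprefix.split('-')[0].replace('.', '-'))
date2 = UTCDateTime(tfprefix.split('-')[1].replace('.', '-'))
print(date1 <= dateev <= date2)                        # True: the event falls inside the TF window
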
Example #7
    def get_PGD(self, t_prior=60, source_durations=2.0):
        '''
        Get PGD considering t_prior seconds before the P-wave and a
        total waveform of source_durations length
        '''

        from numpy import zeros, log10
        from obspy import read
        from datetime import timedelta
        from obspy import UTCDateTime

        self.PGD_north = zeros(self.Nsta)
        self.PGD_east = zeros(self.Nsta)
        self.PGD_up = zeros(self.Nsta)
        self.PGD_cart = zeros(self.Nsta)

        hypo_time = UTCDateTime(self.hypo_time)

        for k in range(self.Nsta):

            #Are the waveform slices empty?
            empty = False

            path = self.path_to_data

            ptime = self.ptime[k]

            tp = hypo_time + timedelta(seconds=ptime)
            t0 = tp - timedelta(seconds=t_prior)
            tfinal = tp + timedelta(seconds=source_durations * self.duration)

            #read waveforms
            try:
                sta = str(self.station_names[k]).rjust(4, '0')
                n = read(path + sta + '.LXN.sac')
                e = read(path + sta + '.LXE.sac')
                z = read(path + sta + '.LXZ.sac')
            except Exception:
                print('ERROR: ' + self.station_names[k] + '.LXN.sac not found')
                return

            #Get pre-event portions (from t0 to tp)
            n_pre = n.copy()
            e_pre = e.copy()
            z_pre = z.copy()

            n_pre[0].trim(starttime=t0, endtime=tp)
            e_pre[0].trim(starttime=t0, endtime=tp)
            z_pre[0].trim(starttime=t0, endtime=tp)

            if n_pre[0].stats.npts == 0 or e_pre[0].stats.npts == 0 or z_pre[
                    0].stats.npts == 0:
                empty = True
            else:
                #Get pre-event mean to remove from waveform
                mean_n = n_pre[0].data.mean()
                mean_e = e_pre[0].data.mean()
                mean_z = z_pre[0].data.mean()

            #Trim post P-arrival signal
            n[0].trim(starttime=tp, endtime=tfinal)
            e[0].trim(starttime=tp, endtime=tfinal)
            z[0].trim(starttime=tp, endtime=tfinal)

            if n[0].stats.npts == 0 or e[0].stats.npts == 0 or z[
                    0].stats.npts == 0:
                empty = True
            elif not empty:  #only demean when the pre-event window was usable
                n[0].data = n[0].data - mean_n
                e[0].data = e[0].data - mean_e
                z[0].data = z[0].data - mean_z

                #Construct "cartesian" sum waveform
                c = n.copy()
                c[0].data = (n[0].data**2 + e[0].data**2 + z[0].data**2)**0.5

            if not empty:  #there was enough data
                #get PGD
                self.PGD_north[k] = abs(n[0].data).max()
                self.PGD_east[k] = abs(e[0].data).max()
                self.PGD_up[k] = abs(z[0].data).max()
                self.PGD_cart[k] = abs(c[0].data).max()
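
The PGD values above are the peak absolute amplitudes of each component after the P arrival, plus a "cartesian" vector sum. A standalone sketch with synthetic numpy arrays:

import numpy as np

n = np.array([0.01, -0.03, 0.05])   # north displacement after the P arrival
e = np.array([0.02, 0.04, -0.01])   # east
z = np.array([-0.02, 0.01, 0.03])   # up
c = np.sqrt(n**2 + e**2 + z**2)     # "cartesian" sum waveform
print(abs(n).max(), abs(e).max(), abs(z).max(), abs(c).max())   # PGD per component and combined
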
def plot(displacement_RMS,
         band = "4.0-14.0",
         logo = 'https://upload.wikimedia.org/wikipedia/commons/thumb/4/44/Logo_SED_2014.png/220px-Logo_SED_2014.png',
         bans = {"2020-03-13":'Groups >100 banned',
                 "2020-03-20":'Groups >5 banned'},
         type = '*',
         scale = 1e9,
         unit = 'nm',
         time_zone = "Europe/Brussels",
         sitedesc = "",# "in Uccle (Brussels, BE)", in original example
         show = True,
         save = None,
         format = 'pdf',
         self = None,
         data_provider='ETH',
         basename=None,
         ):
    if save is not None and not os.path.isdir(save):
        os.makedirs(save)

    for channelcode in list(set([k[:-1] for k in displacement_RMS])):
        
        
        data={}
        for o in 'ZEN':
            if channelcode+o not in displacement_RMS :
                continue
            data[channelcode[-2:]+o] = displacement_RMS[channelcode+o][band]
            main=channelcode[-2:]+o
            
        if len(data.keys())>1:
            data[channelcode[-2:]+'*'] = data[main].copy().resample("30min").median().tshift(30, "min") # for the sum
            main=channelcode[-2:]+'*'
            for i,t in enumerate(data[main].index):
                data[main][i] = 0
            for o in data:
                if o == main:
                    continue
                data[o] = data[o].copy().resample("30min" ).median().tshift(30, "min")
                for i,t in enumerate(data[main].index):
                    if len(data[o].index)-1<i:
                        break
                    if True:#abs(data[o].index[i].timestamp()-data[main].index[i].timestamp())<60:
                        data[main][i] += data[o][i]**2
            for i,t in enumerate(data[main].index):
                data[main][i] = data[main][i]**.5

        data[main] = localize_tz_and_reindex(data[main], "30Min", time_zone = time_zone)
        
        if basename is None:
            basename = "%s%s-%s"%(save,
                                  channelcode[:]+main[-1],
                                  band)

        if type in ['*', 'all', 'sitemaps']:
            ax=sitemap(channelcode[:]+main[-1],
                       data_provider=data_provider,
                       self=self)
            if save is not None:
                ax.figure.savefig("%s-map.%s"%(basename,format),
                                  bbox_inches='tight')
            if show:
                plt.show()
                                
        if type in ['*', 'all', 'clockmaps']:
            ax = hourmap(data[main],
                         bans=bans,
                         scale=scale,
                         unit=unit)
            title = 'Seismic Noise for %s - Filter: [%s] Hz' % (channelcode[:]+main[-1],band)
            ax.set_title(title)
            if save is not None:
                ax.figure.savefig("%s-hourmap.%s"%(basename,format),
                                  bbox_inches='tight',
                                  facecolor='w')
            if show:
                plt.show()

        if type in ['*', 'all', 'gridmaps']:
            ax = gridmap(data[main],
                         bans=bans,
                         scale=scale,
                         unit=unit)
            title = 'Seismic Noise for %s - Filter: [%s] Hz' % (
                channelcode[:] + main[-1], band)
            ax.set_title(title)
            if save is not None:
                ax.figure.savefig("%s-gridmap.%s" % (basename, format),
                                  bbox_inches='tight',
                                  facecolor='w')
            if show:
                plt.show()

        if type in ['*', 'all', 'timeseries']:
            fig = plt.figure(figsize=(12,6))
            if logo is not None:
                fig.figimage(plt.imread(logo),
                             40, 40, alpha=.4, zorder=1)
            plt.plot(data[main].index, data[main], label = main)
            
            for o in data:
                rs = data[o].copy().between_time("6:00", "16:00")
                rs = rs.resample("1D" ).median().tshift(12, "H")
                plt.plot(rs.index, rs,
                         label="$\overline{%s}$ (6h-16h)"%o)#, c='purple')

            

            # Get normal business days and set their background color to green
            db = pd.bdate_range(min(data[main].index),
                                max(data[main].index))
            for dbi in db:
                plt.axvspan(dbi, dbi+datetime.timedelta(days=1),
                            facecolor='lightgreen', edgecolor="none",
                            alpha=0.2, zorder=-10)

            plt.ylim(0,np.nanpercentile(data[main],95)*1.5)
            ticks = ticker.FuncFormatter(lambda x, pos: "{0:g}".format(x*scale))
            plt.gca().yaxis.set_major_formatter(ticks)
            plt.ylabel("Displacement (%s)"%unit)

            plt.title('Seismic Noise for %s - Filter: [%s] Hz' % (channelcode[:]+main[-1],
                                                                  band))
            plt.xlim(data[main].index.min(), data[main].index.max())
            fig.autofmt_xdate()
            plt.grid(True, zorder=-1)
            plt.gca().set_axisbelow(True)
            for iban,ban in enumerate(bans.keys()):
                plt.axvline(UTCDateTime(ban).datetime,
                            color='r',
                            linewidth=2,
                            linestyle=['-', '--', '-.', ':', '-', '--', '-.', ':', '-', '--', '-.', ':'][iban],
                            path_effects=[pe.withStroke(linewidth=4, foreground="k")],
                            zorder=-9,
                            label='\n'.join(wrapper.wrap(bans[ban])))
            plt.legend(loc='center left', bbox_to_anchor=(1, 0.5))
            
            ## Idea: add map in an inset below the legend 
            #axins = inset_axes(ax, width="100%", height="100%",
            #                   bbox_to_anchor=(1.05, .6, .5, .4),
            #                   bbox_transform=ax.transAxes, loc=2, borderpad=0)
            #axins.tick_params(left=False, right=True, labelleft=False, labelright=True)
            if save is not None:
                fig.savefig("%s.%s"%(basename,format),
                            bbox_inches='tight',
                            facecolor='w')
            if show:
                plt.show()
        
        if type in ['*', 'all', 'clockplots', 'dailyplots']:
            preloc = data[main].loc[:max(list(bans.keys()))]
            preloc = preloc.set_index([preloc.index.day_name(), preloc.index.hour+preloc.index.minute/60.])
            postloc = data[main].loc[max(list(bans.keys())):]
            postloc = postloc.set_index([postloc.index.day_name(), postloc.index.hour+postloc.index.minute/60.])
            cmap = plt.get_cmap("tab20")

            if type in ['*', 'all', 'dailyplots']:
                ax = stack_wday_time(preloc,scale).plot(figsize=(14,8), cmap = cmap)
                if len(postloc):
                    stack_wday_time(postloc,scale).plot(ls="--", ax=ax, legend=False,cmap = cmap)
                
                plt.title("Daily Noise Levels in %s" % (channelcode[:]+main[-1]))
                plt.ylabel("Amplitude (%s)"%unit)
                plt.xlabel("Hour of day (local time)")
                plt.grid()
                plt.xlim(0,23)
                plt.ylim(0,np.nanpercentile(data[main],95)*1.5*scale)
                if save is not None:
                    ax.figure.savefig("%s-daily.%s"%(basename,format),
                                      bbox_inches='tight',
                                      facecolor='w')
                if show:
                    plt.show()

            if type in ['*', 'all', 'clockplots']:
                # Polar/clock Plot:
                _ = stack_wday_time(preloc,scale).copy()
                _.loc[len(_)+1] = _.iloc[0]
                _.index = radial_hours(len(_))
                
                #subplot_kw = {'polar':True}
                #opts={#'sharey':True,
                #      'figsize':(12,6),
                #      'subplot_kw':subplot_kw}
                #fig, axes  = plt.subplots(1,2,**opts)

                plt.figure(figsize=(12,6))
                ax = plt.subplot(121, polar=True)
                _.plot(ax=ax)#es[0])
    
                plt.title("Before Lockdown", fontsize=12)
                clock24_plot_commons(ax,unit=unit)#es[0])
                ax.set_rmax(np.nanpercentile(data[main],95)*1.5*scale)
                ax.set_rmin(0)
                ax = plt.subplot(122, polar=True, sharey=ax)
                if len(postloc):
                    _ = stack_wday_time(postloc,scale).copy()
                    _.loc[len(_)+1] = _.iloc[0]
                    _.index = radial_hours(len(_))
                    _.plot(ax=ax,#es[0], 
                           ls="--")
    
                plt.title("After Lockdown", fontsize=12)
                clock24_plot_commons(ax,unit=unit)#es[0])
                # ax.set_rmax(np.nanpercentile(data[main],95)*1.5*scale)
                
                suptitle = "Day/Hour Median Noise levels %s\n"
                suptitle += "Station %s - [%s] Hz"
                plt.suptitle(suptitle % (sitedesc,
                                         channelcode[:]+main[-1],
                                         band),
                             fontsize=16)
                plt.subplots_adjust(top=0.80)
                if save is not None:
                    fig = ax.figure
                    fig.savefig("%s-hourly.%s"%(basename,format),
                                bbox_inches='tight',
                                facecolor='w')
                if show:
                    plt.show()
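
The '*' channel built inside plot() combines the Z/E/N series by resampling each to 30-minute medians, summing the squares, and taking the square root. A pandas sketch of that combination with synthetic series (the 30-minute tshift of the original is omitted here):

import numpy as np
import pandas as pd

idx = pd.date_range("2020-03-01", periods=192, freq="15min")
comps = {o: pd.Series(np.random.rand(len(idx)), index=idx) for o in "ZEN"}  # synthetic RMS series

resampled = {o: s.resample("30min").median() for o, s in comps.items()}
combined = np.sqrt(sum(s ** 2 for s in resampled.values()))  # quadratic sum, as in plot()
print(combined.head())
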
    def load(self,
             network = 'CH',
             station = 'SGEV',
             location = '',
             channel = 'HGZ,HGE,HGN',
             start = UTCDateTime()-3*24*60*60,#"2020-03-07")
             end = UTCDateTime(),# means "now"
             freqs = [(0.1,1.0),(1.0,20.0),(4.0,14.0),(4.0,20.0)],
             save='./',
             clientpqlx=True,
             clientobspy=False,
             steps={'clientpqlx':30,'clientobspy':15},
             sshuserhost='user@hostname',
             tocsv=False,
             slow=False,
             output="DISP",
             **args):
        
        self.displacement_RMS = {}
        if clientpqlx:
            step = steps['clientpqlx']
        if clientobspy:
            step = steps['clientobspy']
        if save is not None and not os.path.isdir(save):
            os.makedirs(save)
        loadfile = '%sSeismoSocialDistancing.h5'%save
        store = pd.HDFStore(loadfile)
        for n in network.split(','):
            for s in station.split(','):
                for l in location.split(','):
                    for c in channel.split(','):
                        backfill = [[start.datetime,end.datetime]]
                        mseedid='%s.%s.%s.%s'%(n,s,l,c)
                        if '/'+mseedid.replace('.','_') in store:
                            tmp=store.select(mseedid.replace('.','_'), 
                                             columns=["%.1f-%.1f"%f for f in freqs], 
                                             where=['index>=start.datetime and index<=end.datetime'])
                            if len(tmp)>0:
                                print('Loaded',mseedid,min(tmp.index),max(tmp.index))
                                tlist = pd.date_range(start.datetime, end.datetime, freq="%dmin"%step)
                                backfill=[[UTCDateTime("1920-03-05").datetime]]
                                for i,t in enumerate(tlist):
                                    if min(abs((t-tmp.index).total_seconds())) > step*60*1.5 :
                                        if (t-backfill[-1][-1]).total_seconds() > step*60*1.5:
                                            backfill+=[[t, t]]
                                        else :
                                            backfill[-1][-1]=t
                                backfill = backfill[1:]
                                self.displacement_RMS[mseedid]=tmp
                        if len(backfill)==0:
                            continue
                        if clientpqlx:
                            for bf in backfill:
                                print('Loading',mseedid,bf)
                                tmp = pqlx2psds(sshuserhost,
                                                network = n,
                                                station = s,
                                                location = l,
                                                channel = c,
                                                start = UTCDateTime(bf[0]),
                                                end = UTCDateTime(bf[1]),
                                                **args)
                                print('Computing',mseedid,bf)
                                if slow:
                                    tmp.dRMS(freqs=freqs)
                                else:
                                    tmp.dfRMS(freqs=freqs,output=output)

                                if mseedid not in tmp.displacement_RMS:
                                    print('Missing',mseedid,bf)
                                    continue
                                print('Appending',mseedid,bf)
                                store.append(mseedid.replace('.','_'),
                                             tmp.displacement_RMS[mseedid])
                        print('Selecting',mseedid,(start.datetime,end.datetime))   
                        if '/'+mseedid.replace('.','_') in store:
                            tmp = store.select(mseedid.replace('.','_'),
                                               columns=["%.1f-%.1f"%f for f in freqs],
                                               where=['index>=start.datetime and index<=end.datetime'])
                            self.displacement_RMS[mseedid] = tmp
                            if tocsv:
                                self.displacement_RMS[mseedid].to_csv("%s%s.csv" % (save,mseedid))
                        else:
                            print('Missing',mseedid,(start.datetime,end.datetime))
        store.close()
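
load() decides what still needs computing by scanning a step-minute grid between start and end and collecting spans where the stored index has no sample within 1.5*step. A small sketch of that gap detection with a synthetic stored index:

import pandas as pd

step = 30  # minutes
stored = pd.date_range("2020-03-01", "2020-03-02", freq="30min")
stored = stored[(stored < "2020-03-01 10:00") | (stored > "2020-03-01 16:00")]  # synthetic gap

backfill = []
for t in pd.date_range("2020-03-01", "2020-03-02", freq="%dmin" % step):
    if min(abs((t - stored).total_seconds())) > step * 60 * 1.5:   # no stored sample nearby
        if backfill and (t - backfill[-1][-1]).total_seconds() <= step * 60 * 1.5:
            backfill[-1][-1] = t        # extend the current gap
        else:
            backfill.append([t, t])     # start a new gap
print(backfill)                          # spans that still need to be computed and appended
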
def hourmap(data,
            bans = {"2020-03-13":'Groups >100 banned',
                    "2020-03-20":'Groups >5 banned'},
            ax=None,
            scale = 1e9,
            unit = 'nm'):
    """
    Make a polar plot of rms

    :type data: pandas dataframe
    :param data: the rms.
    :type bans: dict
    :param bans: annotations; keys are date strings, values are description strings.
    :type ax: matplotlib axes
    :param ax: plot into the provided axes if given.
    :type scale: float
    :param scale: scale amplitudes (to nm by default).
    :type unit: string
    :param unit: unit for amplitudes (nm by default).
    :return: The axes with the plot.

    .. rubric:: Basic Usage

    You may omit bans, ax and scale parameters.

    >>> ax = hourmap(data[mseedid])
    """
    origin_time = data.index[0]
    origin_text = data.index[0].strftime("%Y-%m-%d")
    data = data.copy()
    data *= scale

    vmin, vmax = data.quantile(0.01), data.quantile(0.95)
    data = pivot_for_hourmap(data)

    if ax is None:
        ax=plt.figure(figsize=(7,9)).add_subplot(111, projection='polar')

    ax.grid(color='w',
            # path_effects=[pe.withStroke(linewidth=2,foreground='w')]
            )
    ax.set_xticks(np.linspace(0, np.pi * 2 * 23 / 24, 24))
    ax.set_xticklabels(['%d h' % h for h in range(24)])
    ax.set_theta_zero_location("N")
    ax.set_theta_direction(-1)

    X = np.append(data.columns, 2 * np.pi)
    Y = np.append(data.index, data.index[-1] + 1)

    plt.pcolormesh(X, Y, data, vmax=vmax, vmin=vmin,
                   rasterized=True, antialiased=True)
    cb = plt.colorbar(orientation='horizontal', shrink=0.8)
    cb.ax.set_xlabel("Displacement (%s)" % unit)
    ax.set_rorigin(max(Y) / -4)
    ax.text(np.pi, max(Y) / -4,
            origin_text,
            ha='center', va='center')
    ax.set_xlabel(origin_text)
    ax.grid(color='w',)
    ax.set_rmax(max(Y))

    if bans is not None:
        rticks = [((UTCDateTime(ban).datetime - origin_time.to_pydatetime()).days) for iban, ban in enumerate(bans.keys())]
        xticks = [(UTCDateTime(ban).datetime.hour/24+UTCDateTime(ban).datetime.minute/60/24)*np.pi*2 for iban,ban in enumerate(bans.keys())]
        labels = [bans[iban] for iban in bans.keys()]
        xticks = [xticks[i] for i,d in enumerate(rticks) if d>0]
        labels = [labels[i] for i,d in enumerate(rticks) if d>0]
        rticks = [d for d in rticks if d>0]
        ax.set_rticks(rticks)
        for x,r,l,c in zip(xticks,
                           rticks,
                           labels,
                           range(len(labels))):
            ax.plot(x,r,'o',
                    label='\n'.join(wrapper.wrap(l)),
                    color='C%d'%c,
                    path_effects=[pe.withStroke(linewidth=5,
                                                foreground='w'),
                                  pe.withStroke(linewidth=3,
                                                foreground='k')])

    plt.legend(loc='lower left',
               bbox_to_anchor= (0.0, -0.2), 
               ncol=2,
               borderaxespad=0, 
               frameon=False)

    return ax
def gridmap(data,
            bans = {"2020-03-13":'Groups >100 banned',
                    "2020-03-20":'Groups >5 banned'},
            ax=None,
            scale = 1e9,
            unit = 'nm'):
    """
    Make a day-by-hour grid plot of rms

    :type data: pandas dataframe
    :param data: the rms.
    :type bans: dict
    :param bans: annotations; keys are date strings, values are description strings.
    :type ax: matplotlib axes
    :param ax: plot into the provided axes if given.
    :type scale: float
    :param scale: scale amplitudes (to nm by default).
    :type unit: string
    :param unit: unit for amplitudes (nm by default).
    :return: The axes with the plot.

    .. rubric:: Basic Usage

    You may omit bans, ax and scale parameters.

    >>> ax = gridmap(data[mseedid])
    """
    origin_time = data.index[0]
    origin_text = data.index[0].strftime("%Y-%m-%d")
    data = data.copy()
    data *= scale

    vmin, vmax = data.quantile(0.01), data.quantile(0.95)
    days = pd.DatetimeIndex(np.unique(data.index.strftime("%Y-%m-%d")))
    data = pivot_for_hourmap(data, columns='hours')

    if ax is None:
        fig, ax = plt.subplots(1, 1, figsize=(16, 5))

    X = pd.date_range(origin_text, periods=len(data) + 1).to_pydatetime()
    Y = np.append(data.columns, 24)

    plt.pcolormesh(X, Y, data.T, vmax=vmax, vmin=vmin,
                   rasterized=True, antialiased=True)
    plt.colorbar(shrink=0.7, pad=0.01).set_label("Displacement (%s)" % unit)
    ax.set_xticks(pd.date_range(X[0], X[-1], freq="W-MON").to_pydatetime())
    ax.set_yticks(np.arange(25))
    ax.set_yticklabels(['%d h' % h for h in range(25)])

    # fig.autofmt_xdate()
    plt.grid(True, which='both', c="k")
    plt.tight_layout()

    if bans is not None:
        yticks = [UTCDateTime(ban).hour + UTCDateTime(ban).minute/60. for ban in bans]
        xticks = [UTCDateTime(ban[:11]).datetime for ban in bans]
        labels = [bans[ban] for ban in bans.keys()]
        for x,y,l,c in zip(xticks,
                           yticks,
                           labels,
                           range(len(labels))):
            ax.plot(x,y,'o',
                    label='\n'.join(wrapper.wrap(l)),
                    color='C%d'%c,
                    path_effects=[pe.withStroke(linewidth=5,
                                                foreground='w'),
                                  pe.withStroke(linewidth=3,
                                                foreground='k')])

    plt.legend(loc='lower left',
               bbox_to_anchor= (0.0, -0.2),
               ncol=2,
               borderaxespad=0,
               frameon=False)
    plt.gcf().autofmt_xdate()
    return ax
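def _demo_gridmap():
    # Hedged usage sketch (not part of the original module): same synthetic
    # 30-minute RMS series as in _demo_hourmap, drawn as a day-by-hour grid
    # instead of a polar plot. Values and dates are placeholders.
    import numpy as np
    import pandas as pd
    idx = pd.date_range("2020-03-01", "2020-04-01", freq="30min")
    rms = pd.Series(np.abs(np.random.randn(len(idx))) * 1e-9, index=idx)
    ax = gridmap(rms,
                 bans={"2020-03-16": "Example annotation"},
                 scale=1e9, unit="nm")
    ax.figure.savefig("gridmap_demo.png", dpi=150)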
def pqlx2psds(sshuserhost,
              network = 'CH',
              station = 'SGEV',
              location = '',
              channel = 'HGZ,HGE,HGN',
              dbname = 'AllNetworks',
              start = UTCDateTime()-3*24*60*60,#"2020-03-07")
              end = UTCDateTime(),# means "now"
              blocksize = 31*24*2, # equivalent to 9 days 1 channel
              save='./',
              self = None):
    """
    Get PSDs from PQLX
    
    :type sshuserhost: string.
    :param sshuserhost: ssh connection string, e.g. login@hostname.
    :type network,station,location,channel: string.
    :param network,station,location,channel: the mseed codes, use ',' as separator to get several channels.
    :type start, end: `obspy.UTCDateTime``.
    :param start, end: time window.
    :type freqs: list of tuples.
    :param freqs: frequency ranges (one each tuple).
    :return: `PSDs`object.

    .. rubric:: Basic Usage

    You may omit everyhting but sshuserhost.

    >>>myPSDs = sqlx2drms('login@hostname')
    """
    rflag=False
    if self is None:
        rflag=True
        self=PSDs()
    commands = []
    files = []
    datelist = pd.date_range(start.datetime,
                             end.datetime,
                             freq="30min")
    for date1 in datelist:
        date2 = date1+pd.Timedelta(minutes=30)
        date3 = date1+pd.Timedelta(minutes=15)
        if date2 > end.datetime:
            break
        for n in network.split(','):
            for s in station.split(','):
                for l in location.split(','):
                    for c in channel.split(','):
                        savef = '%s/%s/%s/%s%s/%s/%s'%(save,
                                                       n,s,l,c,
                                                       date1.strftime("%Y-%m-%d"),
                                                       date1.strftime('%X').replace(':','.'))
                        mseedid = '.'.join([n,s,l,c])
                        command = 'exPSDhour'
                        command += ' AllNetworks'
                        command += ' %s'%mseedid.replace('..','.--.').replace('.',' ')
                        command += ' %s'%date1.strftime("%Y-%m-%d")
                        command += ' %s'%date2.strftime("%Y-%m-%d")
                        command += ' %s'%date1.strftime('%X')
                        command += ' %s'%date2.strftime('%X')
                        addons = (mseedid,date3.strftime("%Y-%m-%d"),date3.strftime("%X"))
                        command += ' P | sed "s/$/\t%s\t%s\t%s\tmyprecious/"\n'%addons
                        commands += [command]
 
    for c in range(0,len(commands),blocksize):
        ssh = subprocess.Popen(["ssh",
                                "-i", ".ssh/id_rsa",
                                sshuserhost],#sys.argv[1]],
                               stdin =subprocess.PIPE,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               universal_newlines=True,
                               bufsize=0)
        stop = c+blocksize
        stop = min([len(commands),stop])
        for cc,command in enumerate(commands[c:stop]):
            ssh.stdin.write(command)
        ssh.stdin.close()

        # Fetch output
        for line in ssh.stdout:        
            if 'myprecious' in line:
                try:
                    data = [v for v in line.strip().split('\t')[:-1]]
                except:
                    print(line.strip(),'unexpected line')
                    continue
                mseedid = data[-3]
                time = UTCDateTime('%s %s'%(data[0],data[1])).datetime
                self.add(time,mseedid)
                self.count[(mseedid,time)] += [1]
                self.psd[(mseedid,time)] += [float(data[3])]
                self.per[(mseedid,time)] += [float(data[2])]
    
    if rflag:
        return self
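def _demo_pqlx2psds(sshuserhost="user@pqlx.example.org"):
    # Hedged usage sketch (not part of the original module): the host name and
    # channel codes are placeholders, and the call only works where the PQLX
    # ``exPSDhour`` tool is reachable over ssh with key authentication.
    return pqlx2psds(sshuserhost,
                     network="CH", station="SGEV",
                     location="", channel="HGZ",
                     start=UTCDateTime() - 3 * 24 * 60 * 60,
                     end=UTCDateTime())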
 parser.add_argument("--dbname", "-d", 
                     help="set dbname, pqlx mode", 
                     default='AllNetworks')
 parser.add_argument("--blocksize", "-x", 
                     help="set blocksize (number PSDs fetched at once)", 
                     type=int,
                     default=31*24*2)
 # Read arguments from the command line
 args = parser.parse_args()
 # Pre-process args
 args.show = True
 if args.noshow:
     args.show = False
     plt.switch_backend('Agg')
 if not isinstance(args.begin,int):
     args.begin=UTCDateTime(args.begin)
 else:
     args.begin=UTCDateTime()-60*60*24*int(args.begin)
 if not isinstance(args.end,int):
     args.end=UTCDateTime(args.end)
 else:
     args.end=UTCDateTime()-60*60*24*int(args.end)
 args.begin._set_minute(0)
 args.begin._set_second(0)
 args.begin._set_microsecond(0)
 args.end._set_minute(0)
 args.end._set_second(0)
 args.end._set_microsecond(0)
 # Check for --pqlx
 clientpqlx=False
 clientobspy=False
Ejemplo n.º 14
0
    def test_evalresp_with_output_from_seed(self):
        """
        The StationXML file has been converted to SEED with the help of a tool
        provided by IRIS:

        https://seiscode.iris.washington.edu/projects/stationxml-converter
        """
        t_samp = 0.05
        nfft = 16384

        # Test for different output units.
        units = ["DISP", "VEL", "ACC"]
        filenames = ["IRIS_single_channel_with_response", "XM.05", "AU.MEEK"]

        for filename in filenames:
            xml_filename = os.path.join(self.data_dir,
                                        filename + os.path.extsep + "xml")
            seed_filename = os.path.join(self.data_dir,
                                         filename + os.path.extsep + "seed")

            p = Parser(seed_filename)

            # older systems don't like an end date in the year 2599
            t_ = UTCDateTime(2030, 1, 1)
            if p.blockettes[50][0].end_effective_date > t_:
                p.blockettes[50][0].end_effective_date = None
            if p.blockettes[52][0].end_date > t_:
                p.blockettes[52][0].end_date = None

            resp_filename = p.get_resp()[0][-1]

            inv = read_inventory(xml_filename)

            network = inv[0].code
            station = inv[0][0].code
            location = inv[0][0][0].location_code
            channel = inv[0][0][0].code
            date = inv[0][0][0].start_date

            for unit in units:
                resp_filename.seek(0, 0)

                seed_response, seed_freq = evalresp(t_samp,
                                                    nfft,
                                                    resp_filename,
                                                    date=date,
                                                    station=station,
                                                    channel=channel,
                                                    network=network,
                                                    locid=location,
                                                    units=unit,
                                                    freq=True)

                xml_response, xml_freq = \
                    inv[0][0][0].response.get_evalresp_response(t_samp, nfft,
                                                                output=unit)

                self.assertTrue(np.allclose(seed_freq, xml_freq, rtol=1E-5))
                self.assertTrue(
                    np.allclose(seed_response, xml_response, rtol=1E-5))

                # also test getting response for a set of discrete frequencies
                indices = (-2, 0, -1, 1, 2, 20, -30, -100)
                freqs = [seed_freq[i_] for i_ in indices]
                response = inv[0][0][0].response
                got = response.get_evalresp_response_for_frequencies(
                    freqs, output=unit)
                expected = [seed_response[i_] for i_ in indices]
                np.testing.assert_allclose(got, expected, rtol=1E-5)
Ejemplo n.º 15
0
def _preprocessh5_single(phase: str, rot: str, pol: str, taper_perc: float,
                         model: obspy.taup.TauPyModel, taper_type: str,
                         tz: int, ta: int, rfloc: str, deconmeth: str,
                         hc_filt: float, logger: logging.Logger,
                         rflogger: logging.Logger, hdf5_file: str,
                         evtcat: obspy.Catalog, remove_response: bool):
    """
    Single core processing of one single hdf5 file.

    .. warning:: Should not be called directly; use
        :func:`~seismic.waveform.preprocessh5.preprocess_h5` instead!
    """
    f = hdf5_file
    net, stat, _ = os.path.basename(f).split('.')
    code = '%s.%s' % (net, stat)

    outf = os.path.join(rfloc, code)

    # Find out which files have already been processed:
    if os.path.isfile(outf + '.h5'):
        with RFDataBase(outf) as rfdb:
            ret, rej = rfdb._get_known_waveforms()
            rflogger.debug('Already processed waveforms: %s' % str(ret))
            rflogger.debug('\nAlready rejected waveforms: %s' % str(rej))
    else:
        ret = []
        rej = []
    rflogger.info(f'Processing Station {code}')
    with ASDFDataSet(f, mode='r', mpi=False) as ds:
        # get station inventory
        try:
            inv = ds.waveforms[code].StationXML
        except KeyError:
            logger.exception(
                f'Could not find station inventory for Station {net}.{stat}')
        rf = RFStream()
        # There has to be a smarter way to do this. Only some events
        # have a corresponding waveform
        # At least only compute theoretical arrival if the distance is within
        # thresholds

        # Which times are available as raw data?
        t_raw = [
            tr.stats.starttime for tr in ds.waveforms[code]['raw_recording']
        ]
        t_raw_min = min(t_raw) - 600
        t_raw_max = max(t_raw) + 600
        # c_date = inv[0][0].creation_date
        # t_date = inv[0][0].termination_date
        for evt in tqdm(evtcat):
            # Already processed?
            ot = (evt.preferred_origin() or evt.origins[0]).time
            ot_fiss = UTCDateTime(ot).format_fissures()
            if ot_fiss in rej or ot_fiss in ret:
                rflogger.debug('RF with ot %s already processed.' % ot_fiss)
                continue
            # Skip events with no data.
            if ot < t_raw_min or t_raw_max < ot:
                rflogger.debug(f'No raw data for event {ot_fiss}.')
                continue
            try:
                toa, rayp, rayp_s_deg, baz, distance = compute_toa(
                    evt, inv[0][0].latitude, inv[0][0].longitude, phase, model)
            except IndexError:
                rflogger.debug('Phase not viable for epicentral distance')
                continue
            except ValueError as e:
                rflogger.debug(e)
                continue
            st = ds.get_waveforms(net, stat, '*', '*', toa - tz, toa + ta,
                                  'raw_recording')
            if not st.count():
                logger.info(
                    f'No traces found for Station {net}.{stat} and arrival ' +
                    f'time {toa}')
                continue
            try:
                rf_temp = __station_process__(st, inv, evt, phase, rot, pol,
                                              taper_perc, taper_type, tz, ta,
                                              deconmeth, hc_filt, logger,
                                              rflogger, net, stat, baz,
                                              distance, rayp, rayp_s_deg, toa,
                                              rej, ret, remove_response)
            except Exception as e:
                rflogger.exception(
                    'RF Creation failed. Waveform Data:\n' +
                    f'{net}.{stat}.{ot_fiss}\noriginal error:\n' + f'{e}')
                continue
            if rf_temp is not None:
                rf.append(rf_temp)
            # Write to file regularly to avoid holding too much in RAM
            if rf.count() >= 20:
                rflogger.info('Writing to file %s....' % outf)
                with RFDataBase(outf) as rfdb:
                    rfdb.add_rf(rf)
                    rfdb._add_known_waveform_data(ret, rej)
                rflogger.info('..written.')
                rf.clear()
    rflogger.info('Writing to file %s....' % outf)
    with RFDataBase(outf) as rfdb:
        rfdb.add_rf(rf)
        rfdb._add_known_waveform_data(ret, rej)
    rflogger.info('..written.')
    rf.clear()
Ejemplo n.º 16
0
from obspy import read,UTCDateTime
from numpy import mean,where,log,log10
from mudpy.forward import lowpass as bandpass
from mtspec import mtspec
from matplotlib import pyplot as plt
from scipy.interpolate import interp1d
import matplotlib as mpl
from scipy.signal import spectrogram



plt.ioff()

#stations=['AP01','PB12','PB16','MNMCX','PSGCX']
stations=['GO01','HMBCX','PB08','PB11','TA01','AP01','PB12','PB16','MNMCX','PSGCX']
time_epi=UTCDateTime('2014-04-01T23:46:47Z')
sim_path=u'/Volumes/Illapel/FQ/iquique/output/waveforms/iquique.000000/'
data_path=u'/Users/dmelgar/Iquique2014/SAC/PROC/'
tcut=120
fcorner=[1./10,20]
vmin=-7 ; vmax=0
cmap_spec=plt.cm.jet

mpl.rcParams['xtick.labelsize'] = 14
mpl.rcParams['ytick.labelsize'] = 14


for k in range(len(stations)):
    
    sta=stations[k]
    print sta
Ejemplo n.º 17
0
def __station_process__(st, inv, evt, phase, rot, pol, taper_perc, taper_type,
                        tz, ta, deconmeth, hc_filt, logger, rflogger, net,
                        stat, baz, distance, rayp, rayp_s_deg, toa,
                        rej: List[str], ret: List[str], remove_response: bool):
    """
    Processing that is equal for each waveform recorded on one station
    """
    # Is the data already processed?
    origin = (evt.preferred_origin() or evt.origins[0])
    ot_fiss = UTCDateTime(origin.time).format_fissures()
    # ot_loc = UTCDateTime(origin.time, precision=-1).format_fissures()[:-6]

    # Remove response
    if remove_response:
        st.attach_response(inv)
        st.remove_response()

    # DEMEAN AND DETREND #
    st.detrend(type='demean')

    # TAPER #
    st.taper(max_percentage=taper_perc,
             type=taper_type,
             max_length=None,
             side='both')

    infodict = {}

    # create RF
    try:
        st, _, infodict = __rotate_qc(phase, st, inv, net, stat, baz, distance,
                                      ot_fiss, evt, origin.latitude,
                                      origin.longitude, origin.depth,
                                      rayp_s_deg, toa, logger, infodict, tz,
                                      pol)
        if hc_filt:
            st.filter('lowpass', freq=hc_filt, zerophase=True, corners=2)
        # Rotate to LQT or PSS
        if rot == "LQT":
            st, ia = rotate_LQT_min(st, phase)
            # additional QC
            if ia < 5 or ia > 75:
                raise SNRError(
                    "The estimated incidence angle of %s degrees is "
                    "unrealistic." % str(ia))

        elif rot == "PSS":
            _, _, st = rotate_PSV(inv[0][0][0].latitude,
                                  inv[0][0][0].longitude, rayp, st, phase)

        # Create RF object
        if phase[-1] == "S":
            trim = [40, 0]
            if distance >= 70:
                trim[1] = ta - (-2 * distance + 180)
            else:
                trim[1] = ta - 40
        elif phase[-1] == "P":
            trim = False

        RF = createRF(st,
                      phase,
                      pol=pol,
                      info=infodict,
                      trim=trim,
                      method=deconmeth)
        ret.append(ot_fiss)

    except SNRError as e:
        rflogger.info(f'{e} {ot_fiss}')
        rej.append(ot_fiss)
        return None

    except Exception as e:
        rflogger.exception('RF Creation failed. Waveform Data:\n' +
                           f'{net}.{stat}.{ot_fiss}\noriginal error:\n' +
                           f'{e}')
        return None

    return RF
Ejemplo n.º 18
0
import time
import zmq

from obspy import UTCDateTime

from detector.send_receive.tcp_server import TcpServer

context = zmq.Context()
server = TcpServer('tcp://*:5555', context)

while True:
    server.send(str(UTCDateTime()).encode())
    time.sleep(1)
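
# Hedged counterpart sketch (not from the original source): a plain pyzmq
# consumer for the timestamps sent above, meant to run as a separate process.
# The PUSH/PULL socket type is an assumption about what
# detector.send_receive.tcp_server.TcpServer wraps; switch to zmq.SUB plus a
# subscribe filter if the server actually publishes.
def _receive_timestamps(endpoint='tcp://localhost:5555'):
    import zmq
    ctx = zmq.Context()
    receiver = ctx.socket(zmq.PULL)
    receiver.connect(endpoint)
    while True:
        # Blocks until the server sends the next UTCDateTime string.
        print(receiver.recv().decode())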

Ejemplo n.º 19
0
def validate_sac_content(hf, hi, hs, data, *tests):
    """
    Check validity of loaded SAC file content, such as header/data consistency.

    :param hf: SAC float header array
    :type hf: :class:`numpy.ndarray` of floats
    :param hi: SAC int header array
    :type hi: :class:`numpy.ndarray` of ints
    :param hs: SAC string header array
    :type hs: :class:`numpy.ndarray` of str
    :param data: SAC data array
    :type data: :class:`numpy.ndarray` of float32
    :param tests: One or more of the following validity tests:
        'delta' : Time step "delta" is positive.
        'logicals' : Logical values are 0, 1, or null
        'data_hdrs' : Length, min, mean, max of data array match header values.
        'enums' : Check validity of enumerated values.
        'reftime' : Reference time values in header are all set.
        'reltime' : Relative time values in header are absolutely referenced.
        'all' : Do all tests.
    :type tests: str

    :raises: :class:`SacInvalidContentError` if any of the specified tests
        fail, or if 'data_hdrs' is specified and data is None or empty.
        :class:`ValueError` if no tests or an unrecognized test is specified.

    """
    # TODO: move this to util.py and write and use individual test functions,
    # so that all validity checks are in one place?
    _all = ('delta', 'logicals', 'data_hdrs', 'enums', 'reftime', 'reltime')

    if 'all' in tests:
        tests = _all

    if not tests:
        raise ValueError("No validation tests specified.")
    elif any([(itest not in _all) for itest in tests]):
        msg = "Unrecognized validataion test specified"
        raise ValueError(msg)

    if 'delta' in tests:
        dval = hf[HD.FLOATHDRS.index('delta')]
        if not (dval >= 0.0):
            msg = "Header 'delta' must be >= 0."
            raise SacInvalidContentError(msg)

    if 'logicals' in tests:
        for hdr in ('leven', 'lpspol', 'lovrok', 'lcalda'):
            lval = hi[HD.INTHDRS.index(hdr)]
            if lval not in (0, 1, HD.INULL):
                msg = "Header '{}' must be {{{}, {}, {}}}."
                raise SacInvalidContentError(msg.format(hdr, 0, 1, HD.INULL))

    if 'data_hdrs' in tests:
        try:
            is_min = np.allclose(hf[HD.FLOATHDRS.index('depmin')], data.min())
            is_max = np.allclose(hf[HD.FLOATHDRS.index('depmax')], data.max())
            is_mean = np.allclose(hf[HD.FLOATHDRS.index('depmen')],
                                  data.mean())
            if not all([is_min, is_max, is_mean]):
                msg = "Data headers don't match data array."
                raise SacInvalidContentError(msg)
        except (AttributeError, ValueError) as e:
            msg = "Data array is None, empty array, or non-array. " + \
                  "Cannot check data headers."
            raise SacInvalidContentError(msg)

    if 'enums' in tests:
        for hdr in HD.ACCEPTED_VALS:
            enval = hi[HD.INTHDRS.index(hdr)]
            if not is_valid_enum_int(hdr, enval, allow_null=True):
                msg = "Invalid enumerated value, '{}': {}".format(hdr, enval)
                raise SacInvalidContentError(msg)

    if 'reftime' in tests:
        nzyear = hi[HD.INTHDRS.index('nzyear')]
        nzjday = hi[HD.INTHDRS.index('nzjday')]
        nzhour = hi[HD.INTHDRS.index('nzhour')]
        nzmin = hi[HD.INTHDRS.index('nzmin')]
        nzsec = hi[HD.INTHDRS.index('nzsec')]
        nzmsec = hi[HD.INTHDRS.index('nzmsec')]

        # all header reference time fields are set
        if not all([
                val != HD.INULL
                for val in [nzyear, nzjday, nzhour, nzmin, nzsec, nzmsec]
        ]):
            msg = "Null reference time values detected."
            raise SacInvalidContentError(msg)

        # reference time fields are reasonable values
        try:
            UTCDateTime(year=nzyear,
                        julday=nzjday,
                        hour=nzhour,
                        minute=nzmin,
                        second=nzsec,
                        microsecond=nzmsec)
        except ValueError as e:
            raise SacInvalidContentError("Invalid reference time: %s" % str(e))

    if 'reltime' in tests:
        # iztype is set and points to a non-null header value
        iztype_val = hi[HD.INTHDRS.index('iztype')]
        if is_valid_enum_int('iztype', iztype_val, allow_null=False):
            if iztype_val == 9:
                hdr = 'b'
            elif iztype_val == 11:
                hdr = 'o'
            elif iztype_val == 12:
                hdr = 'a'
            elif iztype_val in range(13, 23):
                hdr = 'it' + str(iztype_val - 13)

            if hi[HD.FLOATHDRS.index(hdr)] == HD.INULL:
                msg = "Reference header '{}' for iztype '{}' not set."
                raise SacInvalidContentError(msg.format(hdr, iztype_val))

        else:
            msg = "Invalid iztype: {}".format(iztype_val)
            raise SacInvalidContentError(msg)

    return
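def _check_sac_arrays(hf, hi, hs, data):
    # Hedged usage sketch (not from the original source): ``hf``, ``hi``,
    # ``hs`` and ``data`` are the raw SAC header/data arrays used throughout
    # this module; the chosen subset of tests is illustrative only.
    try:
        validate_sac_content(hf, hi, hs, data, 'delta', 'reftime', 'data_hdrs')
    except SacInvalidContentError as exc:
        print("SAC content problem: %s" % exc)
        return False
    return True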
Ejemplo n.º 20
0
def dive(mfloat,date_begin,date_end):
        # Set the path for the float
        mfloat_path = "../processed/" + mfloat + "/"
        #set the filter Date
        filterDate = [(mfloat,date_begin,date_end)]
        # Get float number
        mfloat_nb = re.findall("(\d+)$", mfloat)[0]

        # Copy appropriate files in the directory
        for f in glob.glob("../processed/"+ mfloat +"/processed/*/*.LOG.h"):
            shutil.copy(f, mfloat_path)

        for f in glob.glob("../processed/"+ mfloat +"/processed/*/*.MER.env"):
            shutil.copy(f, mfloat_path)

        for f in glob.glob("../processed/"+ mfloat +"/processed/*.LOG.h"):
            shutil.move(f, f[0:len(f)-2])

        for f in glob.glob("../processed/"+ mfloat +"/processed/*.MER.env"):
            sp=f.split(".")
            shutil.move(f, "../processed/"+ mfloat +"/"+sp[5]+".MER")

        # Build list of all mermaid events recorded by the float
        mevents = events.Events(mfloat_path)

        # Build list of all profiles recorded
        ms41s = profile.Profiles(mfloat_path)

        # Process data for each dive
        mdives = dives.get_dives(mfloat_path, mevents, ms41s)

        # Filter dives between begin and end date
        for fd in filterDate:
            fname = fd[0]
            begin = fd[1]
            end = fd[2]
            if fname == mfloat:
                mdives = [dive for dive in mdives if begin <= dive.date <= end]

        # Software version
        print ""
        print "Software version"
        for dive in mdives:
            if dive.is_init:
                formatted_log = dive.log_content #utils.format_log(dive.log_content)
                soft_version = re.findall(".+soft.+", formatted_log)
                if len(soft_version) == 0:
                    print str(UTCDateTime(dive.date).isoformat()) + " : No software version available"
                    continue
                print re.findall(".+soft.+", formatted_log)[0]

        # Find errors and warnings
        print ""
        print "List of errors"
        for dive in mdives:
            if dive.is_complete_dive:
                formatted_log = dive.log_content #utils.format_log(dive.log_content)
                for err in re.findall(".+<ERR>.+", formatted_log):
                    print err

        print ""
        print "List of warnings"
        for dive in mdives:
            if dive.is_complete_dive:
                formatted_log = dive.log_content #utils.format_log(dive.log_content)
                for wrn in re.findall(".+<WRN>.+", formatted_log):
                    print wrn

        # GPS synchronisations
        print ""
        print "Synchronisations GPS"
        pps_detect_list = list()
        gpsack_list = list()
        gpsoff_list = list()
        position_list = list()
        for dive in mdives:
            if dive.is_complete_dive:
                formatted_log = dive.log_content #utils.format_log(dive.log_content)
                pps_detect_list += re.findall(".+PPS.+", formatted_log)
                gpsack_list += re.findall(".+GPSACK.+", formatted_log)
                gpsoff_list += re.findall(".+GPSOFF.+", formatted_log)
                position_list += re.findall(".+N\d+deg\d+\.\d+mn,.*E\d+deg\d+\.\d+mn.+", formatted_log)

        if len(pps_detect_list) != len(gpsack_list) and len(gpsack_list) != len(gpsoff_list) \
                and len(gpsoff_list) != len(position_list):
            print "LENGTH ERROR !!!!"
        else:
            for pps_detect in pps_detect_list:
                print pps_detect
            for gpsack in gpsack_list:
                print gpsack
            for gpsoff in gpsoff_list:
                print gpsoff
            for position in position_list:
                print position

        # Get dive number
        dive_nb = 0
        for dive in mdives:
            if dive.is_complete_dive:
                dive_nb += 1

        # Pump time needed to reach bladder full at the end of the dive
        print ""
        print "Temps de pompe pour le bladder full en fin de plongee (s):"
        temps_bladder_full = list()
        for dive in mdives:
            if dive.is_complete_dive:
                start_filling_date = utils.find_timestampedUTC_values("filling external bladder", dive.log_content)[-1][1]
                bdf_time = 300*5
                try :
                    bladder_full_date = utils.find_timestampedUTC_values("external bladder full", dive.log_content)[-1][1]
                except :
                    print "No external bladder full : " + dive.log_name
                else :
                    bdf_time = int(UTCDateTime(bladder_full_date) - UTCDateTime(start_filling_date))
                temps_bladder_full.append(bdf_time)
                print str(UTCDateTime(dive.date).isoformat()) + " : " + str(bdf_time)
        temps_bladder_full_moyen = int(float(sum(temps_bladder_full)) / dive_nb)
        print "Temps moyen (s): " + str(temps_bladder_full_moyen)
        print "Temps moyen (h:min:s): 00:" + str(temps_bladder_full_moyen/60) + ":" + str(temps_bladder_full_moyen % 60)

        # Pump current draw while filling the external bladder
        print ""
        print "Consommation de la pompe pendant le bladder full (amperes):"
        amp_val_list = list()
        for dive in mdives:
            if dive.is_complete_dive:
                all_filling_str = utils.find_timestampedUTC_values("filling external bladder", dive.log_content)
                if len(all_filling_str) == 0:
                    print str(UTCDateTime(dive.date).isoformat()) + " : Pas de debut de remplissage de vessie"
                    continue
                start_filling_date = all_filling_str[-1][1]
                all_bladder_full_str = utils.find_timestampedUTC_values("external bladder full", dive.log_content)
                if len(all_bladder_full_str) == 0:
                    print str(UTCDateTime(dive.date).isoformat()) + " : Pas de fin de remplissage de vessie"
                    continue
                bladder_full_date =  all_bladder_full_str[-1][1]
                bladder_full_power = utils.find_timestampedUTC_values("battery.+", dive.log_content)
                max_pwr = str(UTCDateTime(dive.date).isoformat()) + ": " + "Aucune mesure (" + dive.log_name + ")"
                max_amp = 0
                for bfp in bladder_full_power:
                    bfp_date = bfp[1]
                    if bladder_full_date > bfp_date > start_filling_date:
                        amp_val = int(re.findall("(\d+)uA", bfp[0])[0])
                        if amp_val > max_amp:
                            # Keep the highest current value seen during the dive
                            max_amp = amp_val
                            max_pwr = str(UTCDateTime(bfp_date).isoformat()) + ": " + str(round(float(amp_val) / 1000000., 2)) + "A"
                # For each dive, print the maximum current value and store it in a list
                print max_pwr
                amp_val_list += [max_amp]
        print "Consommation moyenne: " + str(round(float(sum(amp_val_list)) / len(amp_val_list) / 1000000., 2)) + "A"

        # Bypass time
        print ""
        print "Temps de bypass (s):"
        temps_bypass = []
        nb_ouverture_secondaire_bypass = []
        for dive in mdives:
            if dive.is_complete_dive:
                bypass_all_str = re.findall(":\[BYPASS.+\].*opening (\d+)ms", dive.log_content)
                if len(bypass_all_str) == 0:
                    print str(UTCDateTime(dive.date).isoformat()) + " : Pas de coups de bypass "
                    continue
                bypass_first = int(bypass_all_str[0])
                bypass_second = [int(x) for x in bypass_all_str[1:]]
                temps_bypass += [bypass_first + sum(bypass_second)]
                print str(UTCDateTime(dive.date).isoformat()) + " : " + str((bypass_first + sum(bypass_second))/1000)
                nb_ouverture_secondaire_bypass += [len(bypass_second)]
        print "Nombre d'ouvreture secondaires: " + str(nb_ouverture_secondaire_bypass)
        # print "Temps total (s): " + str(sum(temps_bypass)/1000)
        temps_bypass_moyen = int(float(sum(temps_bypass)) / dive_nb)/1000
        print "Temps moyen (s): " + str(temps_bypass_moyen)

        # Ratio (bladder-full time) / (bypass time)
        print ""
        print "Rapport (temps pour le bladder full) / (temps de bypass):"
        print str(round(float(temps_bladder_full_moyen) / float(temps_bypass_moyen), 1))

        # Pump time while diving
        print ""
        print "Temps de pompe en plongee (s):"
        temps_pompe = []
        temps_pompe_min = 600000
        for dive in mdives:
            if dive.is_complete_dive:
                all_filling_str = utils.find_timestampedUTC_values("filling external bladder", dive.log_content)
                if len(all_filling_str) == 0:
                    print str(UTCDateTime(dive.date).isoformat()) + " : Pas de remplissage de vessie, ne peut pas estimer une fin de plongee"
                    continue
                start_filling_date = all_filling_str[-1][1]
                all_bypass_str = utils.find_timestampedUTC_values(":\[BYPASS.+\].*opening (\d+)ms", dive.log_content)
                if len(all_bypass_str) == 0:
                    print str(UTCDateTime(dive.date).isoformat()) + " : Pas de coup de bypass, ne peut pas estimer un debut de plongee"
                    continue
                first_bypass_date = all_bypass_str[0][1]
                temps_pompe_timestamp_str = utils.find_timestampedUTC_values(":\[PUMP.+\].*during (\d+)ms", dive.log_content)
                liste_activation_pompe = [int(tp[0]) for tp in temps_pompe_timestamp_str if tp[1] < start_filling_date and tp[1] > first_bypass_date]
                for time in liste_activation_pompe :
                    if time < temps_pompe_min :
                        temps_pompe_min = time
                temps_total_pompe_par_plongee = sum(liste_activation_pompe)
                temps_pompe += [temps_total_pompe_par_plongee]
                print str(UTCDateTime(dive.date).isoformat()) + " : " + str(round(float(temps_total_pompe_par_plongee) / 1000, 3))
        # print "Temps total (s): " + str(sum(temps_pompe)/1000)
        temps_pompe_moyen = int(float(sum(temps_pompe)) / dive_nb)/1000
        print "Temps moyen (s): " + str(temps_pompe_moyen)
        print "Temp min (ms): " + str(temps_pompe_min)

        # Valve time
        print ""
        print "Temps de valve (s):"
        temps_valve = []
        temps_valve_min = 60000
        for dive in mdives:
            if dive.is_complete_dive:
                temps_valve_str = re.findall(":\[VALVE.+\].*opening f?o?r? ?(\d+)ms", dive.log_content)
                liste_activation_valve = [int(tv) for tv in temps_valve_str]
                for time in liste_activation_valve :
                    if time < temps_valve_min:
                        temps_valve_min = time
                temps_total_valve_par_plongee = sum(liste_activation_valve)
                temps_valve += [temps_total_valve_par_plongee]
                print str(UTCDateTime(dive.date).isoformat()) + " : " + str(round(float(temps_total_valve_par_plongee) / 1000, 3))
        # print "Temps total (ms): " + str(sum(temps_valve))
        temps_valve_moyen = float(sum(temps_valve)) / dive_nb / 1000
        print "Temps moyen (s): " + str(round(temps_valve_moyen, 3))
        print "Temps min (ms): " + str(temps_valve_min)

        # Ratio (pump time while diving) / (valve time)
        print ""
        print "Rapport (temps de pompe en plongee) / (temps de valve):"
        print str(round(float(temps_pompe_moyen) / float(temps_valve_moyen), 1))

        # Clean directories
        for f in glob.glob(mfloat_path + "/" + "*.LOG.h"):
            os.remove(f)
        for f in glob.glob(mfloat_path + "/" + "*.MER.env"):
            os.remove(f)
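if __name__ == "__main__":
    # Hedged usage sketch (not from the original source): the float name and
    # the time window are placeholders; the script expects processed MERMAID
    # logs under ../processed/<float name>/. Whether date_begin/date_end are
    # meant to be UTCDateTime or datetime objects is an assumption.
    dive("452.020-P-06",
         UTCDateTime("2018-01-01"),
         UTCDateTime("2018-06-01"))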
Ejemplo n.º 21
0
    def get_PGD_with_time(self, tinit='ptime', tfinal=200):
        '''
        Get PGD for every station as a function of time
        
        '''

        from numpy import where, c_, array, nan
        from obspy import read
        from obspy import UTCDateTime
        from string import rjust
        from scipy import maximum

        self.PGD_north_with_time = []
        self.PGD_east_with_time = []
        self.PGD_up_with_time = []
        self.PGD_cart_with_time = []

        hypo_time = UTCDateTime(self.hypo_time)

        for k in range(self.Nsta):

            #Are the waveform slices empty?
            empty = False

            path = self.path_to_data

            if tinit == 'ptime':
                tstart = self.ptime[k]
            else:
                tstart = self.stime[k]

            #read waveforms
            try:
                sta = rjust(str(self.station_names[k]), 4, '0')
                n = read(path + sta + '.LXN.sac')
                e = read(path + sta + '.LXE.sac')
                z = read(path + sta + '.LXZ.sac')
            except:
                print 'ERROR: ' + self.station_names[k] + '.LXN.sac not found'
                return

            #Trim from event origin to end
            n[0].trim(starttime=hypo_time, endtime=hypo_time + tfinal)
            e[0].trim(starttime=hypo_time, endtime=hypo_time + tfinal)
            z[0].trim(starttime=hypo_time, endtime=hypo_time + tfinal)

            #FIND TIMES BEFORE TSTART (p or s time)
            inorth = where(n[0].times() < tstart)[0]
            ieast = where(e[0].times() < tstart)[0]
            iup = where(z[0].times() < tstart)[0]

            if n[0].stats.npts == 0 or e[0].stats.npts == 0 or z[
                    0].stats.npts == 0:
                empty = True
            else:
                #Get pre-event mean to remove from waveform
                mean_n = n[0].data[inorth].mean()
                mean_e = e[0].data[ieast].mean()
                mean_z = z[0].data[iup].mean()

            if n[0].stats.npts == 0 or e[0].stats.npts == 0 or z[
                    0].stats.npts == 0:
                empty = True
            else:  #de-mean and make things before arrival =0 (no PGD possible)
                n[0].data = n[0].data - mean_n
                e[0].data = e[0].data - mean_e
                z[0].data = z[0].data - mean_z
                n[0].data[inorth] = 0
                e[0].data[ieast] = 0
                z[0].data[iup] = 0

                #Construct "cartesian" sum waveform
                c = n.copy()
                c[0].data = (n[0].data**2 + e[0].data**2 + z[0].data**2)**0.5

            if empty == False:  #there was enough data
                #get PGD include time vector
                north = c_[n[0].times(), maximum.accumulate(abs(n[0].data))]
                east = c_[e[0].times(), maximum.accumulate(abs(e[0].data))]
                up = c_[z[0].times(), maximum.accumulate(abs(z[0].data))]
                cart = c_[c[0].times(), maximum.accumulate(abs(c[0].data))]

                self.PGD_north_with_time.append(north)
                self.PGD_east_with_time.append(east)
                self.PGD_up_with_time.append(up)
                self.PGD_cart_with_time.append(cart)

            else:
                self.PGD_north_with_time.append(array([nan]))
                self.PGD_east_with_time.append(array([nan]))
                self.PGD_up_with_time.append(array([nan]))
                self.PGD_cart_with_time.append(array([nan]))
Ejemplo n.º 22
0
def parse_query(query):
    """Parse request arguments into a set of parameters

    Parameters
    ----------
    query: Immutable Dict
        request.args object

    Returns
    -------
    WebServiceQuery
        parsed query object

    Raises
    ------
    WebServiceException
        if any parameters are not supported.
    """
    # Get values
    observatory_id = query.get("id")
    starttime = query.get("starttime")
    endtime = query.get("endtime")
    elements = query.getlist("elements")
    sampling_period = query.get("sampling_period", DEFAULT_SAMPLING_PERIOD)
    data_type = query.get("type", DEFAULT_DATA_TYPE)
    output_format = query.get("format", DEFAULT_OUTPUT_FORMAT)
    # Parse values and set defaults
    if len(elements) == 0:
        elements = DEFAULT_ELEMENTS
    if len(elements) == 1 and "," in elements[0]:
        elements = [e.strip() for e in elements[0].split(",")]
    if not starttime:
        now = datetime.now()
        starttime = UTCDateTime(year=now.year, month=now.month, day=now.day)
    else:
        try:
            starttime = UTCDateTime(starttime)
        except Exception as e:
            raise WebServiceException(
                f"Bad starttime value '{starttime}'."
                " Valid values are ISO-8601 timestamps.") from e
    if not endtime:
        endtime = starttime + (24 * 60 * 60 - 1)
    else:
        try:
            endtime = UTCDateTime(endtime)
        except Exception as e:
            raise WebServiceException(
                f"Bad endtime value '{endtime}'."
                " Valid values are ISO-8601 timestamps.") from e
    try:
        sampling_period = float(sampling_period)
    except ValueError as e:
        raise WebServiceException(
            f"Bad sampling_period {sampling_period}, "
            f"valid values are {','.join(VALID_SAMPLING_PERIODS)}") from e
    # Create WebServiceQuery object and set properties
    params = WebServiceQuery()
    params.observatory_id = observatory_id
    params.starttime = starttime
    params.endtime = endtime
    params.elements = elements
    params.sampling_period = sampling_period
    params.data_type = data_type
    params.output_format = output_format
    return params
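def _demo_parse_query():
    # Hedged usage sketch (not from the original source): Flask's request.args
    # is a werkzeug ImmutableMultiDict, so an equivalent object can be built
    # directly for testing; the observatory id and element list are
    # placeholders.
    from werkzeug.datastructures import ImmutableMultiDict
    query = ImmutableMultiDict([
        ("id", "BOU"),
        ("starttime", "2020-01-01T00:00:00Z"),
        ("elements", "X,Y,Z,F"),
    ])
    params = parse_query(query)
    print(params.observatory_id, params.starttime, params.endtime)
    return params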
Ejemplo n.º 23
0
def plot_accNumber(home,project_name,cata_name,filter_detc,min_inter,time1,time2):
    #plot accumulated number of EQ in catalog v.s. detections
    '''
        min_inter: minimum inter event time (s)
        time1,time2: plot data between the range
    '''
    import glob
    from repeq import data_proc
    from obspy import UTCDateTime
    import matplotlib
    matplotlib.use('pdf') #use a non-interactive backend
    import matplotlib.pyplot as plt
    '''
    filter_detc = {
        'min_stan':9, #number of non-zero CC measurements
        'min_CC':0.5, #min mean(CC) value
        'diff_t':60, #time difference between events should larger than this
    }
    '''
    #load catalog and get their time
    df = data_proc.cat2pd(home+'/'+project_name+'/catalog/'+cata_name)
    template_time = [UTCDateTime(df.Date[i]+'T'+df.Time[i]) for i in range(len(df))]
    template_time = np.array(template_time)

    #load detections and get their time
    detcs = glob.glob(home+'/'+project_name+'/'+'output/Template_match/Detections/'+'Detected_tmp_*.npy')
    detcs.sort()
    detc_time = []
    for detc_path in detcs:
        detc = np.load(detc_path,allow_pickle=True)
        detc = detc.item()
        detc = data_proc.clean_detc(detc,filter_detc)
        detc_time += detc.keys()

    detc_time.sort()
    detc_time = np.array(detc_time)
    detc_time = [UTCDateTime(i) for i in detc_time]

    #set min-interevent time to remove redundant data
    clean_template_time = data_proc.clean_events_time(template_time,min_time=min_inter)
    clean_detc_time = data_proc.clean_events_time(detc_time,min_time=min_inter)

    t_temp, accnum_temp = data_proc.cal_accum(clean_template_time,time1,time2,dt=3600)
    t_detc, accnum_detc = data_proc.cal_accum(clean_detc_time,time1,time2,dt=3600)

    main_OT = UTCDateTime("2018-05-04T22:32:54.650Z").datetime #mainshock OT
    #convert UTCDateTime to datetime for plotting
    t_temp = [i.datetime for i in t_temp]
    t_detc = [i.datetime for i in t_detc]
    plt.figure(figsize=(10,4.5))
    plt.plot(t_temp,accnum_temp,'k')
    plt.plot(t_detc,accnum_detc,'r')
    print('***manually add something in plot function***')
    plt.plot([main_OT,main_OT],[0,np.max(accnum_detc)],'r--')
    plt.ylim([0,np.max(accnum_detc)])
    plt.xlim([UTCDateTime(time1).datetime,UTCDateTime(time2).datetime])
    plt.xlabel('Date',fontsize=14)
    plt.ylabel('Accumulated number',fontsize=14)
    plt.savefig(home+'/'+project_name+'/'+'output/Template_match/Detections/'+'detections.png')
    plt.close()
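def _demo_plot_accNumber():
    # Hedged usage sketch (not from the original source): home, project name
    # and catalog name are placeholders; filter_detc mirrors the dictionary
    # documented in the docstring above.
    plot_accNumber(home='/home/user/repeq_runs',
                   project_name='Hawaii2018',
                   cata_name='area1.cat',
                   filter_detc={'min_stan': 9, 'min_CC': 0.5, 'diff_t': 60},
                   min_inter=30,
                   time1='2018-05-01', time2='2018-08-01')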
Ejemplo n.º 24
0
def sahke(network_code="X2", level="station", comp_list=["N", "E", "Z"]):
    """
    SAHKE transect broadband stations aren't fetchable through FDSN
    webservices, so build a network object from the available information that
    was collected through the SAHKE final report provided by Martha Savage.

    I'm not sure if the CMGs are 30s or 60s instruments or how much that
    actually matters

    Notes from GNS SAHKE Report:

        Instruments and dataloggers:
        CMG3ESP: T004, LTN6
        CMG40T: LE4, T007, T010, T014, T016, T018, T020
        Dataloggers: Reftek-130s 

        3-2-2010 (2010-061): LE4 sampling rate changed from 40Hz to 100Hz
        This isn't relevant for the data that I have...

    NRL variations:
        3ESP:
            Natural Period: "100 s - 50 Hz", "120 s - 50 Hz", "30 s - 50 Hz", 
                            "60 s - 50 Hz"
            Sensitivity: "1500", "2000", "20000"
        40T:
            Natural Period: "100s - 50Hz", "10s - 100Hz", "1s - 100Hz",
                            "20s - 50Hz", "2s - 100Hz", "30s - 100Hz",
                            "30s - 50 Hz", "40s - 100Hz", "5s - 100Hz",
                            "60s - 100Hz", "60s - 50Hz"
            Sensitivity:   "1600", "2000", "20000", "800"
        RT130S:
            Gain: "1", "32"



    :type network_code: str
    :param network_code: chosen two value code used for the network
    :type level: str
    :param level: level to propagate network creation to
    :type comp_list: list of str
    :param comp_list: components to create channels for
    :rtype: obspy.core.inventory.network.Network
    :return: obspy Network object with information propagated to the chosen level
    """
    # station, location, start, stop, lat, lon, instr type
    station_info = np.array(
        [["LE4", "", "2010-136", "2010-331", -41.3579, 175.6919, "40t"],
         ["LTN6", "LT", "2010-193", "2010-349", -41.1033, 175.3238, "3esp"],
         ["T004", "", "2010-088", "2010-255", -41.3403, 175.6688, "3esp"],
         ["T007", "", "2010-041", "2010-123", -41.3041, 175.6513, "40t"],
         ["T010", "T0", "2010-135", "2010-348", -41.2520, 175.5825, "40t"],
         ["T014", "", "2010-034", "2010-350", -41.2075, 175.5063, "40t"],
         ["T016", "", "2010-088", "2010-322", -41.1893, 175.4737, "40t"],
         ["T018", "", "2010-055", "2010-349", -41.1715, 175.3850, "40t"],
         ["T020", "", "2010-089", "2010-261", -41.1251, 175.3497, "40t"]])

    # For setting the network timing
    starttimes = station_info[:, 2]
    endtimes = station_info[:, 3]

    unique_starts = [UTCDateTime(str(_)) for _ in np.unique(starttimes)]
    unique_ends = [UTCDateTime(str(_)) for _ in np.unique(endtimes)]

    min_starttime = min(unique_starts)
    max_endtime = max(unique_ends)

    # Elevations are not known
    default_elevation = 0.0
    default_depth = 0.0
    default_site = Site(name="SAHKE")

    # Create response information
    if level == "channel":
        nrl = NRL()
        responses = {
            "40t":
            nrl.get_response(
                sensor_keys=["Guralp", "CMG-40T", "60s - 50Hz", "2000"],
                datalogger_keys=["REF TEK", "RT 130S & 130-SMHR", "1", "100"]),
            "3esp":
            nrl.get_response(
                sensor_keys=["Guralp", "CMG-3ESP", "60 s - 50 Hz", "2000"],
                datalogger_keys=["REF TEK", "RT 130S & 130-SMHR", "1", "100"]),
        }

    # Add stations to Station objects
    stations = []
    for stalist in station_info:
        # Parse the list to avoid confusion with indices
        code = stalist[0]
        location = stalist[1]
        start_date = UTCDateTime(stalist[2])
        end_date = UTCDateTime(stalist[3])
        latitude = stalist[4]
        longitude = stalist[5]

        # Create channel level objects if required
        if level == "channel":
            channels = []
            for comp in comp_list:
                cha = Channel(code=f"HH{comp}",
                              location_code=location,
                              start_date=start_date,
                              end_date=end_date,
                              latitude=latitude,
                              longitude=longitude,
                              elevation=default_elevation,
                              depth=default_depth,
                              azimuth=0.0,
                              dip=-90.0,
                              sample_rate=100)
                cha.response = responses[stalist[-1]]
                channels.append(cha)
        else:
            channels = None

        # Create the Station object
        station = Station(code=code,
                          start_date=start_date,
                          end_date=end_date,
                          latitude=latitude,
                          longitude=longitude,
                          elevation=default_elevation,
                          site=default_site,
                          creation_date=UTCDateTime(),
                          channels=channels)

        stations.append(station)

    # Build the network and place a description
    network = Network(code=network_code,
                      start_date=min_starttime,
                      end_date=max_endtime,
                      description="SAHKE BBTRANSECT",
                      stations=stations)

    return network
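def _write_sahke_stationxml(path="SAHKE_network.xml"):
    # Hedged usage sketch (not from the original source): wrap the returned
    # Network in an Inventory and write StationXML. Channel-level responses
    # need a local NRL copy, so station level is used here; the output path is
    # a placeholder.
    from obspy.core.inventory import Inventory
    inv = Inventory(networks=[sahke(level="station")], source="SAHKE report")
    inv.write(path, format="STATIONXML")
    return inv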
Ejemplo n.º 25
0
def plot_reptcs(home,project_name,tempID,NetStaChnLoc,phs,cut_window,v_minmax=[-3,3],ref_OT="2018-05-04T22:32:54.650"):
    '''
        Plot detected time series for a given template ID.

        tempID: template ID (e.g. '00836')
        NetStaChnLoc: net.station_name.channel.loc (e.g. HV.JOKA.HHZ.)
        phs: phase name, in case both P and S exist for the same NetStaChnLoc
        cut_window: same window as used with data_proc.cut_dailydata or data_proc.bulk_cut_dailydata.
                    Note that the time information of the lag measurements is provided in the measure_lag_temp*.npy file.
        v_minmax: colormap range for the lag measurement plot
        ref_OT: reference origin time; the y axis is zero at ref_OT
    '''
    import numpy as np
    import matplotlib
    matplotlib.use('pdf')
    import matplotlib.pyplot as plt
    from obspy import UTCDateTime,Stream
    import os
    
    ref_OT = UTCDateTime(ref_OT)
    fullName = NetStaChnLoc+'.'+phs
    #find the file to be plotted
    tcs_cut = home+'/'+project_name+'/'+'output/Template_match/Data_detection_cut/Detected_data_'+tempID+'.npy'
    file_lag = home+'/'+project_name+'/'+'output/Template_match/Measure_lag/measure_lag_temp_'+tempID+'.npy'
    flag_plotall = True
    #to see if the file exist
    if not os.path.exists(tcs_cut):
        print('File %s does not exist! Aborting.'%(tcs_cut))
        return
    if not os.path.exists(file_lag):
        print('Lag measurement file %s does not exist, only showing time series'%(file_lag))
        flag_plotall = False #only one subplot

    #load tcs_cut file
    D = np.load(tcs_cut,allow_pickle=True)
    D = D.item()

    #load lag measurement if file exist
    if os.path.exists(file_lag):
        MeasLag = np.load(file_lag,allow_pickle=True)
        MeasLag = MeasLag.item()

    #some plot setting(normalize, get range of y etc.)
    y_range = [i for i in D['detc_tcs'].keys()]
    y_range.sort()
    num_y = len(y_range)
    if len(y_range)==1:
        y_range = [UTCDateTime(y_range[0])-86400,UTCDateTime(y_range[0])+86400]
    else:
        y_range = [UTCDateTime(y_range[0]),UTCDateTime(y_range[-1])] #the data spanning from y_range[0] to y_range[1]
    dy_range = (y_range[1]-y_range[0])/86400.0
    data_mul = dy_range/num_y #tcs with this amplitude should be good

    fig = plt.figure(constrained_layout=True)
    props = dict(boxstyle='round', facecolor='white', alpha=0.8)
    gs = fig.add_gridspec(3,1)
    if os.path.exists(file_lag):
        f3_ax1 = fig.add_subplot(gs[:2, 0]) #merge the first two row
    else:
        f3_ax1 = fig.add_subplot(gs[:, 0]) #merge all the three row

    #####start plotting#####
    for ik in D['detc_tcs'].keys():
        #print('--select:',NetStaChnLoc,'from D["detc_tcs"][%s]'%(ik))
        DD = D['detc_tcs'][ik].select(network=NetStaChnLoc.split('.')[0],station=NetStaChnLoc.split('.')[1],channel=NetStaChnLoc.split('.')[2],location=NetStaChnLoc.split('.')[3])
        if len(DD)==0:
            continue #note that NetStaChnLoc doesnt always in every D['detc_tcs'][ik]
        if len(DD)!=1:
            #selected two or more phases, add phs condition and select data again
            phases = D['phase'][ik]
            #***make sure the order if D and phases are the same
            for i in range(len(DD)):
                if (DD[i].stats.network==NetStaChnLoc.split('.')[0]) & (DD[i].stats.station==NetStaChnLoc.split('.')[1]) & (DD[i].stats.channel==NetStaChnLoc.split('.')[2]) & (DD[i].stats.location==NetStaChnLoc.split('.')[3]):
                    if phases[i]==phs:
                        #also check the phase
                        DD = Stream(DD[i].copy())
                        break
        #selected the data, start plotting data
        #print('----data selected:',DD)
        time = DD[0].times()
        data = DD[0].data
        data_norm = data/np.max(data)*data_mul
        shft_plt = (UTCDateTime(ik)-ref_OT)/86400.0 # reference time in days
        f3_ax1.plot(time-cut_window[0],data_norm+shft_plt,'k',linewidth=1.0)
        f3_ax1.fill_between(time-cut_window[0],np.zeros_like(time)+shft_plt,data_norm+shft_plt,where=np.zeros_like(time)+shft_plt>data_norm+shft_plt,color=[0,0,0],interpolate=True)
    #after plotting tcs, add text and adjust axis, plot label
    x_pos = (cut_window[1] + cut_window[0]) * 0.03 - cut_window[0]
    y_pos = f3_ax1.get_ylim()
    y_pos = (y_pos[1]-y_pos[0])*0.9+y_pos[0]
    f3_ax1.text(x_pos,y_pos,fullName,fontsize=12,bbox=props)
    f3_ax1.set_xlim([cut_window[0]*-1,cut_window[1]])
    f3_ax1.set_ylabel('Day relative to mainshock',fontsize=14)
    if (os.path.exists(file_lag)):
        #two subplots
        f3_ax1.set_xticklabels([]) #remove xlabel in the first subplot
    else:
        #if only one subplot
        f3_ax1.set_xlabel('Arrival time (s)',fontsize=14)
        plt.savefig(home+'/'+project_name+'/'+'output/Template_match/Figs/reptcs_'+tempID+'_'+fullName+'.png')
        print('figure saved:',home+'/'+project_name+'/'+'output/Template_match/Figs/reptcs_'+tempID+'_'+fullName+'.png')
        plt.close()
        return

    #use vmin and vmax from input
    vmin = v_minmax[0]
    vmax = v_minmax[1]

    #if not returned, continue to two subplots case
    f3_ax2 = fig.add_subplot(gs[-1, 0])
    f3_ax2.set_xlim([cut_window[0]*-1,cut_window[1]])
    f3_ax2.set_ylim([-0.2,0.2])
    f3_ax2.set_xlabel('Arrival time (s)',fontsize=14)
    f3_ax2.set_ylabel(r'$\tau$ (s)',fontsize=14)

    #get range of day relative to reftime (for different color)
    iks = [ik for ik in MeasLag['detc_OT'].keys()]
    iks.sort()
    iks_ref = np.array([(UTCDateTime(ik)-ref_OT)/86400.0 for ik in iks])
    print('iks_ref=',iks_ref)
    print('***Fix the vmin,vmax to %f,%f***'%(vmin,vmax))
    #cmap_ref = plt.cm.seismic(plt.Normalize(iks_ref[0],iks_ref[-1])(iks_ref)) #use the vminmax from data
    
    #cmap_ref = plt.cm.seismic(plt.Normalize(-10,10)(iks_ref)) #use the define seismic
    my_color = my_colormap()
    print('use manual seismic colormap')
    cmap_ref = my_color(plt.Normalize(vmin,vmax)(iks_ref))

    ik_color = {} #make color table
    for i in range(len(iks_ref)):
        ik_color[iks[i]] = cmap_ref[i]


    plt.plot([cut_window[0]*-1,cut_window[1]],[0,0],'k--',linewidth=0.3) #plot horizontal line
    #loop all the available measurements
    for ik in MeasLag['detc_OT'].keys():
        if fullName in MeasLag['detc_OT'][ik]:
            lag_time = MeasLag['detc_OT'][ik][fullName]['time']
            lag_shift = MeasLag['detc_OT'][ik][fullName]['shift']
            lag_CCC = MeasLag['detc_OT'][ik][fullName]['CCC']
            #if np.mean(lag_CCC)<0.5:
            #    continue
            #plt_idx = np.where(lag_CCC>=0.5)[0]
            if (UTCDateTime(ik)-ref_OT)/86400.0 < vmin or (UTCDateTime(ik)-ref_OT)/86400.0>vmax:
                continue
            plt.plot(lag_time,lag_shift,color=ik_color[ik],linewidth=0.5)
            #plt.plot(lag_time[plt_idx],lag_shift[plt_idx],'.-',color=ik_color[ik],linewidth=0.3,markersize=0.1)

    #add colormap
    #norm = matplotlib.colors.Normalize(vmin=iks_ref[0], vmax=iks_ref[-1])
    print('***Fix the vmin,vmax to %f,%f***'%(vmin,vmax))
    norm = matplotlib.colors.Normalize(vmin=vmin, vmax=vmax)
    #cmap = matplotlib.cm.ScalarMappable(norm=norm, cmap='seismic')
    cmap = matplotlib.cm.ScalarMappable(norm=norm, cmap=my_color)
    cmap.set_array([])

    #These two lines mean put the bar inside the plot
    cbaxes = fig.add_axes([0.73, 0.3, 0.12, 0.022])
    clb=plt.colorbar(cmap,cax=cbaxes, orientation='horizontal',label='Day')
    clb.set_label('Day', rotation=0,labelpad=0)

    figname = home + '/' + project_name + '/output/Template_match/Figs/reptcs_' + tempID + '_' + fullName + '.png'
    plt.savefig(figname)
    print('figure-2subplots saved:', figname)
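The inset colorbar used above is a generally useful trick; the following is a minimal, self-contained matplotlib sketch of the same technique (all names and values here are illustrative, not taken from the original function):

# Sketch of the inset-colorbar technique: map a scalar (here, day relative to a
# reference time) to line colors, then draw a small horizontal colorbar inside
# the figure with fig.add_axes().
import numpy as np
import matplotlib
import matplotlib.pyplot as plt

vmin, vmax = -10, 10                       # color range in days (illustrative)
days = np.linspace(vmin, vmax, 5)          # example scalar values
norm = matplotlib.colors.Normalize(vmin=vmin, vmax=vmax)
cmap = matplotlib.cm.ScalarMappable(norm=norm, cmap='seismic')
cmap.set_array([])

fig, ax = plt.subplots()
t = np.linspace(0, 10, 200)
for d in days:
    ax.plot(t, 0.01 * d * np.sin(t), color=cmap.to_rgba(d), linewidth=0.8)

# [left, bottom, width, height] in figure fraction: a small bar inside the axes
cbaxes = fig.add_axes([0.73, 0.3, 0.12, 0.022])
clb = fig.colorbar(cmap, cax=cbaxes, orientation='horizontal')
clb.set_label('Day', rotation=0, labelpad=0)
plt.show()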
Example No. 26
def bannister(network_code="ZX", level="station", comp_list=["N", "E", "Z"]):
    """
    Stephen Bannister deployed a broadband network in the northern North Island.
    Data were provided via personal communication; this list of station locations
    was generated from Stephen's email exchange with Yoshi.

    Most stations weren't provided with an end date.

    :type network_code: str
    :param network_code: chosen two value code used for the network
    :type level: str
    :param level: level to propagate network creation
    :type comp_list: list of str
    :param comp_list: components to create channels for
    :rtype: obspy.core.inventory.network.Network
    :return: obspy Network object with information propagated to the chosen level
    """
    # station, lat, lon, depth, start, end
    station_info_zx = np.array([
        ['GA01', -39.0331, 177.8549, 33.000, 2011305, 9999001],
        ['GA02', -38.7793, 177.8695, 220.000, 2011305, 9999001],
        ['GA03', -38.6059, 177.9959, 46.000, 2011305, 9999001],
        ['GA04', -38.5529, 178.1548, 210.000, 2011305, 9999001],
        ['GA05', -38.5844, 177.8048, 85.000, 2011305, 9999001],
        ['GA06', -39.1110, 177.9161, 159.000, 2012001, 9999001],
        ['GA07', -38.4322, 178.0478, 569.000, 2012001, 9999001],
        ['GA08', -38.8312, 177.7095, 358.000, 2012001, 9999001],
        ['GA09', -38.5194, 177.9363, 107.000, 2013152, 9999001],
        ['GA10', -38.9087, 177.4627, 57.000, 2013152, 9999001],
        # ['GA11', -38.4932, 177.6711, 224.000, 2014060, 9999001],
    ])

    station_info_z8 = np.array([
        ['HD01', -38.4802, 175.9473, 470.000, 2009244, 2010091],
        ['HD02', -38.6275, 175.9196, 600.000, 2009244, 2012306],
        ['HD03', -38.5497, 176.0564, 630.000, 2009244, 2010091],
        ['HD04', -38.4930, 176.2204, 440.000, 2009244, 9999001],
        ['HD05', -38.4663, 176.2627, 445.000, 2009244, 2011091],
        ['HD06', -38.3932, 176.0619, 280.000, 2009244, 2011091],
        ['HD07', -38.3214, 176.1626, 480.000, 2009244, 2010091],
        ['HD08', -38.6300, 176.3063, 520.000, 2009244, 2011091],
        ['HD09', -38.6675, 176.1798, 450.000, 2010001, 2010305],
        ['HD10', -38.5482, 176.3669, 390.000, 2010001, 2011091],
        ['HD11', -38.6320, 176.2606, 320.000, 2010091, 2011305],
        ['HD12', -38.3711, 176.1570, 360.000, 2010091, 2011091],
        ['HD13', -38.4546, 176.3458, 340.000, 2010091, 2011091],
        ['HD14', -38.4594, 176.1714, 340.000, 2010091, 2010305],
        ['HD15', -38.4872, 176.0043, 660.000, 2010091, 2011091],
        ['HD16', -38.4408, 175.9444, 420.000, 2010091, 2011091],
        ['HD17', -38.5483, 175.0556, 600.000, 2010091, 2011091],
        ['HD18', -38.5283, 176.4573, 375.000, 2010305, 2011091],
        ['HD19', -38.3762, 176.3696, 325.000, 2010305, 2011091],
        ['HD20', -38.5842, 176.1467, 430.000, 2010305, 2011091],
        ['HD21', -38.5052, 176.0877, 515.000, 2010305, 2011091],
        ['HD22', -38.5666, 176.1907, 385.000, 2010305, 2011001],
        ['HD23', -38.6253, 175.9533, 520.000, 2010305, 2011091],
        ['HD24', -38.5671, 176.0895, 560.000, 2010305, 2011091],
        ['HD25', -38.5855, 176.2945, 305.000, 2010305, 2011091],
        ['HD26', -38.5696, 175.9547, 610.000, 2010305, 2011091],
        ['HD27', -38.4324, 176.2374, 530.000, 2010305, 2011091],
        ['HD28', -38.4424, 176.1580, 380.000, 2010305, 2011091],
        ['HD29', -38.4012, 176.1991, 460.000, 2010305, 2011091],
        ['HD30', -38.5291, 175.9377, 480.000, 2010305, 2011091],
        ['HD31', -38.3019, 176.3053, 525.000, 2010305, 2011091],
        ['HD32', -38.7035, 176.1434, 558.000, 2010335, 2011091],
        ['HD33', -38.5031, 176.2634, 305.000, 2011001, 2011091],
        ['HD34', -38.4764, 176.0497, 515.000, 2011032, 2011091],
        ['HD35', -38.5331, 176.0008, 500.000, 2011032, 2011060],
        ['HD36', -38.4912, 176.1677, 340.000, 2011032, 2011091],
        ['HD37', -38.4177, 176.1366, 355.000, 2011032, 2011091],
        ['HD38', -38.5190, 176.2041, 325.000, 2011032, 2011091],
        ['HD39', -38.4868, 176.3221, 295.000, 2011032, 2011091],
        # We don't have any data for these stations
        # ['HD50', -38.3396, 176.2687, 361.000, 2015305, 9999001],
        # ['HD51', -38.2435, 176.2483, 380.000, 2015305, 9999001],
        # ['HD53', -38.2659, 176.4812, 362.000, 2015335, 9999001],
        # ['HD54', -38.2815, 176.3781, 480.000, 2015335, 9999001],
        # ['HD55', -38.2815, 176.5605, 469.000, 2015335, 9999001],
        # ['HD56', -38.3184, 176.5538, 439.000, 2015335, 9999001],
        # ['HD57', -38.1872, 176.3714, 520.000, 2015335, 9999001],
        # ['HD58', -38.3019, 176.3053, 492.000, 2015335, 9999001],
        # ['HD59', -38.2815, 176.3781, 440.000, 2015335, 9999001],
        # ['HD60', -38.2863, 176.1992, 389.000, 2016061, 9999001]
    ])

    # Pick which network to be exported
    if network_code == "ZX":
        station_info = station_info_zx
    elif network_code == "Z8":
        station_info = station_info_z8

    # Elevations are not known
    default_elevation = 0.0
    default_site = Site(name="BAN")

    # For setting the network timing
    unique_starts = [
        UTCDateTime(str(_)) for _ in np.unique(station_info[:, 4])
    ]

    # Ignore the 9999001 placeholder used for unknown endtimes (np.unique sorts, so it is last)
    unique_ends = [
        UTCDateTime(str(_)) for _ in np.unique(station_info[:, 5])[:-1]
    ]

    min_starttime = min(unique_starts)
    # If there are no actual endtimes, set the endtime manually
    try:
        max_endtime = max(unique_ends)
    except ValueError:
        max_endtime = UTCDateTime("2015-01-01")

    # Add stations to objects. Some endtimes are not specified
    stations = []
    for stalist in station_info:
        code = stalist[0]
        latitude = stalist[1]
        longitude = stalist[2]
        depth = stalist[3]
        start_date = UTCDateTime(str(stalist[4]))
        endtime = stalist[5]

        # Deal with endtimes; the placeholder means "use the latest known endtime".
        # station_info is a NumPy string array, so compare against the string sentinel.
        if str(endtime) == "9999001":
            end_date = max(unique_ends)
        else:
            end_date = UTCDateTime(str(endtime))

        # Create channel level objects if required
        if level == "channel":
            channels = []
            for comp in comp_list:
                cha = Channel(code=f"HH{comp}",
                              location_code="10",
                              start_date=start_date,
                              end_date=end_date,
                              latitude=latitude,
                              longitude=longitude,
                              elevation=default_elevation,
                              depth=depth,
                              azimuth=0.0,
                              dip=-90.0,
                              sample_rate=100)
                channels.append(cha)
        else:
            channels = None

        # Create the station object
        station = Station(code=code,
                          latitude=latitude,
                          longitude=longitude,
                          elevation=default_elevation,
                          start_date=start_date,
                          end_date=end_date,
                          site=default_site,
                          creation_date=UTCDateTime(),
                          channels=channels)
        stations.append(station)

    # Create the network object
    network = Network(code=network_code,
                      start_date=min_starttime,
                      end_date=max_endtime,
                      description="Stephen Bannister's Broadband Network",
                      stations=stations)

    return network
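A quick way to check the result is to wrap the returned Network in an Inventory and write it out as StationXML; a minimal sketch, assuming the bannister function above is importable or defined in the same session (file name and source string are illustrative):

# Sketch: wrap the Network in an Inventory and export it as StationXML.
from obspy import UTCDateTime
from obspy.core.inventory import Inventory

net = bannister(network_code="ZX", level="channel")
inv = Inventory(networks=[net], source="bannister station list",
                created=UTCDateTime())
inv.write("ZX_bannister.xml", format="STATIONXML")
print(inv)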
Example No. 27
client = Client('138.253.113.19',
                16022)  # ip, port - ip's 138.253.113.19 or 138.253.112.23

sr = 100
nsta = int(1 * sr)
nlta = int(20 * sr)
trig_on = 7.5
trig_off = 0.2
event = 0
on_off_save = np.zeros(shape=(0, 4))
no_data = 0
shift = 15000
crit = 0.5

#%%
t1_ref = UTCDateTime(1459324380)
t2_ref = t1_ref + 120
st_ref = client.get_waveforms(net, 'LB03', '', 'HHZ', t1_ref, t2_ref)  # `net` must be defined elsewhere in the script

st_ref.detrend(type='linear')
st_ref.detrend(type='demean')
tr_ref = st_ref[0].slice(starttime=t1_ref, endtime=t2_ref)

tr_ref.detrend(type='linear')
tr_ref.detrend(type='demean')
tr_ref.filter(type='bandpass', freqmin=0.001, freqmax=0.1)

st_c_ref = calibrate(tr_ref)
st_c_ref.plot(color='g', starttime=t1_ref, endtime=t1_ref + 120)
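The STA/LTA parameters defined at the top of this example (nsta, nlta, trig_on, trig_off) are not used within the snippet itself; the following is a hypothetical continuation showing how a detection pass might look with ObsPy's trigger routines, assuming calibrate() returned a Stream:

# Hypothetical continuation: run a classic STA/LTA detector on the calibrated
# trace using the parameters defined above.
from obspy.signal.trigger import classic_sta_lta, trigger_onset

tr_c = st_c_ref[0]                               # assumes calibrate() returned a Stream
cft = classic_sta_lta(tr_c.data, nsta, nlta)     # characteristic function
on_off = trigger_onset(cft, trig_on, trig_off)   # [[on_sample, off_sample], ...]
print(on_off)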
Example No. 28
def beacon(network_code="XX", level="station", comp_list=["N", "E", "Z"]):
    """
    Create Beacon network data from scratch.
    Station information taken from the Site and Sensor field deployment notes
    kept on a shared Google Drive with Yoshi, Jonathan and myself.

    Updated 1.5.2020

    :type network_code: str
    :param network_code: chosen two value code used for the network
    :type level: str
    :param level: level to propagate network creation
    :type comp_list: list of str
    :param comp_list: components to create channels for
    :rtype: obspy.core.inventory.network.Network
    :return: obspy Network object with information propagated to the chosen level
    """
    # Station name, Abbreviation, Code, Lat, Lon, Start, End, Instrument type
    station_info = np.array([
        [
            "Pori Rd", "PORI", "RD01", "-40.55475083", "175.9710354",
            "2017-07-19", "2019-04-04", "60s"
        ],
        [
            "Angora Rd", "ANGR", "RD02", "-40.45974293", "176.4750588",
            "2017-07-19", "2019-04-04", "60s"
        ],
        [
            "Te Uri Rd", "TURI", "RD03", "-40.2656269", "176.3828498",
            "2017-07-20", "2019-04-04", "30s"
        ],
        [
            "Porangahau", "PORA", "RD04", "-40.2667317", "176.6344719",
            "2017-07-20", "2019-04-04", "60s"
        ],
        [
            "Manuhara Rd", "MNHR", "RD05", "-40.4689786", "176.2231874",
            "2017-07-20", "2019-04-05", "30s"
        ],
        [
            "Dannevirke", "DNVK", "RD06", "-40.2971794", "176.1663731",
            "2017-07-24", "2019-04-02", "30s"
        ],
        [
            "Waipawa", "WPAW", "RD07", "-39.9017124", "176.5370861",
            "2017-07-24", "2019-04-02", "60s"
        ],
        [
            "Raukawa", "RAKW", "RD08", "-39.7460611", "176.6205577",
            "2017-07-24", "2019-04-02", "60s"
        ],
        [
            "McNeill Hill", "MCNL", "RD09", "-39.4447675", "176.6974385",
            "2017-07-25", "2019-04-03", "60s"
        ],
        [
            "Cape Kidnappers", "CPKN", "RD10", "-39.64661592", "177.0765055",
            "2017-07-25", "2018-03-13", "60s"
        ],
        [
            "Kahuranaki", "KAHU", "RD11", "-39.78731589", "176.8624521",
            "2017-07-25", "2018-03-13", "60s"
        ],
        [
            "Kaweka Forest", "KWKA", "RD12", "-39.425214", "176.4228",
            "2017-07-26", "2019-05-03", "30s"
        ],
        [
            "Kereru", "KERE", "RD13", "-39.643259", "176.3768865",
            "2017-07-26", "2019-04-03", "60s"
        ],
        [
            "Pukenui", "PNUI", "RD14", "-39.9129963", "176.2001869",
            "2017-07-26", "2018-09-08", "60s"
        ],
        [
            "Waipukarau", "WPUK", "RD15", "-40.0627107", "176.4391311",
            "2017-07-27", "2019-04-02", "60s"
        ],
        [
            "Omakere", "OROA", "RD16", "-40.105341", "176.6804449",
            "2017-07-27", "2019-04-04", "60s"
        ],
        [
            "Te Apiti Rd", "TEAC", "RD17", "-39.90868978", "176.9561896",
            "2017-09-25", "2018-03-14", "30s"
        ],  # no sensor number, no instr type
        [
            "River Rd", "RANC", "RD18", "-39.929775", "176.7039773",
            "2017-09-25", "2019-04-03", "30s"
        ],
        [
            "Matapiro Rd", "MATT", "RD19", "-39.5796128", "176.6449024",
            "2018-03-14", "2018-06-25", "30s"
        ],  # same instr. as RD10
        [
            "Kahuranaki", "KAHU2", "RD20", "-39.79385769", "176.8758813",
            "2018-03-13", "2018-09-03", "30s"
        ],  # same instr. as RD11
        [
            "Te Apiti Rd", "TEAC2", "RD21", "-39.913152", "176.946881",
            "2018-03-14", "2019-04-03", "30s"
        ],  # same instr. as RD17
        [
            "Castlepoint", "CAPT", "RD22", "-40.910278", "176.199167",
            "2018-07-20", "2019-05-05", "60s"
        ],  # unknown sensor number
    ])

    # For setting the network timing
    starttimes = station_info[:, 5]
    endtimes = station_info[:, 6]

    unique_starts = [UTCDateTime(str(_)) for _ in np.unique(starttimes)]
    unique_ends = [UTCDateTime(str(_)) for _ in np.unique(endtimes)]

    min_starttime = min(unique_starts)
    max_endtime = max(unique_ends)

    # Elevations are not known
    default_elevation = 0.0
    default_depth = 0.0

    # Response is the same for all stations. Response information was provided
    # through personal correspondence with GeoNet site selection scientist
    # Jonathan Hanson, but could also be ascertained from the instrument type
    # and datalogger type
    if level == "channel":
        nrl = NRL()
        responses = {
            "30s":
            nrl.get_response(
                sensor_keys=["Guralp", "CMG-40T", "30s - 50 Hz", "800"],
                datalogger_keys=[
                    "Nanometrics", "Taurus", "16 Vpp (1)", "Low (default)",
                    "Off", "100"
                ]),
            "60s":
            nrl.get_response(
                sensor_keys=["Guralp", "CMG-40T", "60s - 50Hz", "800"],
                datalogger_keys=[
                    "Nanometrics", "Taurus", "16 Vpp (1)", "Low (default)",
                    "Off", "100"
                ])
        }

    # Add stations to objects
    stations = []
    for stalist in station_info:
        # Parse the station information
        name = stalist[0]  # e.g. Castlepoint
        nickname = stalist[1]  # e.g. CAPT
        code = stalist[2]  # e.g. RD22
        latitude = float(stalist[3])
        longitude = float(stalist[4])
        start_date = UTCDateTime(stalist[5])
        end_date = UTCDateTime(stalist[6])

        # Create channel level objects if required
        if level == "channel":
            channels = []
            for comp in comp_list:
                cha = Channel(code=f"HH{comp}",
                              location_code="10",
                              start_date=start_date,
                              end_date=end_date,
                              latitude=latitude,
                              longitude=longitude,
                              elevation=default_elevation,
                              depth=default_depth,
                              azimuth=0.0,
                              dip=-90.0,
                              sample_rate=100)
                # Attach the response
                cha.response = responses[stalist[7]]
                channels.append(cha)
        else:
            channels = None

        # Create the site object to provide information on the site location
        site = Site(name=nickname, description=name)

        # Create the station object
        station = Station(code=code,
                          latitude=latitude,
                          longitude=longitude,
                          elevation=default_elevation,
                          start_date=start_date,
                          end_date=end_date,
                          site=site,
                          creation_date=UTCDateTime(),
                          channels=channels)
        stations.append(station)

    # Create the network object
    network = Network(code=network_code,
                      start_date=min_starttime,
                      end_date=max_endtime,
                      description="Broadband East Coast Network",
                      stations=stations)
    return network
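A quick sanity check of the channel-level metadata, e.g. verifying that the NRL responses were attached, might look like the sketch below (station RD01 and the 100 Hz HH channels come from the table above; requires internet access since responses are fetched from the online NRL):

# Sketch: build the Beacon network at channel level and inspect one response.
net = beacon(network_code="XX", level="channel")
sta = net.select(station="RD01").stations[0]   # Pori Rd, a 60 s instrument
print(sta)
print(sta.channels[0].response)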
Example No. 29
def convert(self, value):
    # check for datetime
    if 'T' in self.flags:
        return UTCDateTime(value)
    return value
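This convert is a method pulled out of a larger class; a minimal sketch of the context it assumes, using a hypothetical wrapper class whose flags string marks datetime fields with 'T':

# Hypothetical context for the convert() method above.
from obspy import UTCDateTime

class Field:
    def __init__(self, flags=""):
        self.flags = flags

    def convert(self, value):
        # check for datetime
        if 'T' in self.flags:
            return UTCDateTime(value)
        return value

print(Field(flags="T").convert("2020-01-01T00:00:00"))  # -> UTCDateTime
print(Field().convert(42))                              # -> 42, unchanged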
Example No. 30
File: util.py Project: zurgeg/obspy
def sac_to_obspy_header(sacheader):
    """
    Make an ObsPy Stats header dictionary from a SAC header dictionary.

    :param sacheader: SAC header dictionary.
    :type sacheader: dict

    :rtype: :class:`~obspy.core.Stats`
    :return: Filled ObsPy Stats header.

    """

    # 1. get required sac header values
    try:
        npts = sacheader['npts']
        delta = sacheader['delta']
    except KeyError:
        msg = "Incomplete SAC header information to build an ObsPy header."
        raise KeyError(msg)

    assert npts != HD.INULL
    assert delta != HD.FNULL
    #
    # 2. get time
    try:
        reftime = get_sac_reftime(sacheader)
    except (SacError, ValueError, TypeError):
        # ObsPy doesn't require a valid reftime
        reftime = UTCDateTime(0.0)

    b = sacheader.get('b', HD.FNULL)
    #
    # 3. get optional sac header values
    calib = sacheader.get('scale', HD.FNULL)
    kcmpnm = sacheader.get('kcmpnm', HD.SNULL)
    kstnm = sacheader.get('kstnm', HD.SNULL)
    knetwk = sacheader.get('knetwk', HD.SNULL)
    khole = sacheader.get('khole', HD.SNULL)
    #
    # 4. deal with null values
    b = b if (b != HD.FNULL) else 0.0
    calib = calib if (calib != HD.FNULL) else 1.0
    kcmpnm = kcmpnm if (kcmpnm != HD.SNULL) else ''
    kstnm = kstnm if (kstnm != HD.SNULL) else ''
    knetwk = knetwk if (knetwk != HD.SNULL) else ''
    khole = khole if (khole != HD.SNULL) else ''
    #
    # 5. transform to obspy values
    # nothing is null
    stats = {}
    stats['npts'] = npts
    stats['sampling_rate'] = np.float32(1.) / np.float32(delta)
    stats['network'] = _clean_str(knetwk)
    stats['station'] = _clean_str(kstnm)
    stats['channel'] = _clean_str(kcmpnm)
    stats['location'] = _clean_str(khole)
    stats['calib'] = calib

    # store _all_ provided SAC header values
    stats['sac'] = sacheader.copy()

    # get first sample absolute time as UTCDateTime
    # always add the begin time (if it's defined) to get the given
    # SAC reference time, no matter which iztype is given
    # b may be non-zero, even for iztype 'ib', especially if it was used to
    #   store microseconds from obspy_to_sac_header
    stats['starttime'] = UTCDateTime(reftime) + b

    return Stats(stats)
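For reference, a minimal call of this helper on a hand-built header dictionary; in normal use obspy.read() performs this mapping automatically, and the values below are purely illustrative (assumes the function above and its module-level imports are in scope; in upstream ObsPy it lives in obspy.io.sac.util):

# Sketch: feed a minimal SAC header dict through the helper above and inspect
# the resulting Stats. npts and delta are required; the nz* fields give the
# SAC reference time.
sacheader = {"npts": 100, "delta": 0.01, "b": 0.0,
             "nzyear": 2020, "nzjday": 1, "nzhour": 0, "nzmin": 0,
             "nzsec": 0, "nzmsec": 0,
             "knetwk": "IU", "kstnm": "ANMO", "khole": "00", "kcmpnm": "BHZ"}
stats = sac_to_obspy_header(sacheader)
print(stats.sampling_rate)   # 100.0
print(stats.starttime)       # 2020-01-01T00:00:00
print(stats.network, stats.station, stats.location, stats.channel)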