Example #1
    # Assumed imports for this method (not shown in the original snippet):
    #   import numpy as np
    #   import matplotlib.pyplot as plt
    #   from matplotlib.collections import LineCollection
    #   from matplotlib.pyplot import xlabel, ylabel, xlim, ylim
    #   from datetime import timedelta
    #   from obspy import read, Stream, UTCDateTime
    #   from obspy.geodetics import gps2dist_azimuth
    def section_plot(self,
                     assoc_id,
                     files,
                     seconds_ahead=5,
                     record_length=100,
                     channel='Z'):

        station = self.assoc_db.query(
            Candidate.sta).filter(Candidate.assoc_id == assoc_id).all()
        sta_list = []
        for sta, in station:
            sta_list.append(str(sta))
        station_single = self.assoc_db.query(Pick.sta).filter(
            Pick.assoc_id == assoc_id).filter(Pick.locate_flag == None).all()
        for sta, in station_single:
            sta_list.append(str(sta))

        eve = self.assoc_db.query(Associated).filter(
            Associated.id == assoc_id).first()
        # Earthquake's epicenter
        eq_lat = eve.latitude
        eq_lon = eve.longitude

        # Reading the waveforms
        ST = Stream()
        for file in files:
            st = read(file)
            ST += st

        # Some instruments use numeric channel codes (e.g. BH1, BH2, BH3)
        # instead of BHE/BHN/BHZ, so accept either code for each component:
        if channel in ('E', 'e'):
            Chan = 'E1'
        elif channel in ('N', 'n'):
            Chan = 'N2'
        elif channel in ('Z', 'z'):
            Chan = 'Z3'
        else:
            print('Please input component E, e, N, n, Z, or z; '
                  'falling back to the default Z')
            Chan = 'Z3'  # without this fallback, Chan would be undefined below

        # Select matching traces and trim around the event origin time
        ST_new = Stream()
        for tr in ST:
            if tr.stats.channel[2] in Chan and tr.stats.station in sta_list:
                if tr.stats.starttime.datetime < eve.ot and tr.stats.endtime.datetime > eve.ot:
                    tr.trim(
                        UTCDateTime(eve.ot - timedelta(seconds=seconds_ahead)),
                        UTCDateTime(eve.ot + timedelta(seconds=record_length)))
                    ST_new += tr

        # If a station contributed more than one trace, keep only the longest
        # one (most samples) and drop the rest.
        while True:
            ST_new_sta = []
            for tr in ST_new:
                ST_new_sta.append(tr.stats.station)
            duplicate = list(
                set([tr for tr in ST_new_sta if ST_new_sta.count(tr) > 1]))
            if not duplicate:
                break
            index = [
                i for (i, j) in enumerate(ST_new_sta) if j == duplicate[-1]
            ]
            i = 0
            while True:
                if ST_new[index[i]].stats.npts < ST_new[index[i +
                                                              1]].stats.npts:
                    del ST_new[index[i]]
                    break
                elif ST_new[index[i]].stats.npts >= ST_new[index[
                        i + 1]].stats.npts:
                    del ST_new[index[i + 1]]
                    break

        ST_new.detrend('demean')
        #     ST_new.filter('bandpass', freqmin=0.1, freqmax=100)

        factor = 10
        numRows = len(ST_new)
        segs = []
        ticklocs = []
        sta = []
        circle_x = []
        circle_y = []
        segs_picks = []
        ticklocs_picks = []
        for tr in ST_new:
            dmax = tr.data.max()
            dmin = tr.data.min()
            data = tr.data / (dmax - dmin) * factor
            t = np.arange(
                0,
                round(tr.stats.npts / tr.stats.sampling_rate / tr.stats.delta)
            ) * tr.stats.delta  # due to the float point arithmetic issue, can not use "t=np.arange(0,tr.stats.npts/tr.stats.sampling_rate,tr.stats.delta)"
            segs.append(np.hstack((data[:, np.newaxis], t[:, np.newaxis])))
            lon, lat = self.tt_stations_db_3D.query(
                Station3D.longitude, Station3D.latitude).filter(
                    Station3D.sta == tr.stats.station).first()
            # gps2dist_azimuth returns meters; convert to km
            distance = int(
                gps2dist_azimuth(lat, lon, eq_lat, eq_lon)[0] / 1000.)
            ticklocs.append(distance)
            sta.append(tr.stats.station)
            # Dot plot at the pick times. For the vertical-component plot, P
            # picks are queried from the Pick table (falling back to
            # PickModified) and S picks from PickModified; for horizontal
            # components both P and S come from PickModified.
            if Chan == 'Z3':  # note: compare Chan, not channel ('Z3' is never a user input)
                picks_p = self.assoc_db.query(
                    Pick.time).filter(Pick.assoc_id == assoc_id).filter(
                        Pick.sta == tr.stats.station).filter(
                            Pick.chan == tr.stats.channel).filter(
                                Pick.phase == 'P').all()
                if not picks_p:
                    picks_p = self.assoc_db.query(PickModified.time).filter(
                        PickModified.assoc_id == assoc_id).filter(
                            PickModified.sta == tr.stats.station).filter(
                                PickModified.phase == 'P').all()
                picks_s = self.assoc_db.query(PickModified.time).filter(
                    PickModified.assoc_id == assoc_id).filter(
                        PickModified.sta == tr.stats.station).filter(
                            PickModified.phase == 'S').all()
            else:
                picks_p = self.assoc_db.query(PickModified.time).filter(
                    PickModified.assoc_id == assoc_id).filter(
                        PickModified.sta == tr.stats.station).filter(
                            PickModified.phase == 'P').all()
                picks_s = self.assoc_db.query(PickModified.time).filter(
                    PickModified.assoc_id == assoc_id).filter(
                        PickModified.sta == tr.stats.station).filter(
                            PickModified.phase == 'S').all()


            picks = picks_p + picks_s
            for pick, in picks:
                # sample index of the pick relative to the trimmed trace start
                index = int(
                    (pick - eve.ot +
                     timedelta(seconds=seconds_ahead)).total_seconds() /
                    tr.stats.delta)
                circle_x.append(distance + data[index])
                circle_y.append(t[index])
                # BAR plot where picks are picked
                t_picks = np.array([t[index], t[index]])
                data_picks = np.array([data.min(), data.max()])
                segs_picks.append(
                    np.hstack(
                        (data_picks[:, np.newaxis], t_picks[:, np.newaxis])))
                ticklocs_picks.append(distance)
        tick_max = max(ticklocs)
        tick_min = min(ticklocs)
        offsets = np.zeros((numRows, 2), dtype=float)
        offsets[:, 0] = ticklocs
        offsets_picks = np.zeros((len(segs_picks), 2), dtype=float)
        offsets_picks[:, 0] = ticklocs_picks

        lines = LineCollection(segs,
                               offsets=offsets,
                               transOffset=None,
                               linewidths=.25,
                               color='gray')
        lines_picks = LineCollection(segs_picks,
                                     offsets=offsets_picks,
                                     transOffset=None,
                                     linewidths=1,
                                     color='k')

        fig = plt.figure(figsize=(15, 8))
        ax1 = fig.add_subplot(111)
        # gray dots mark where the picks cross the waveforms
        ax1.plot(circle_x, circle_y, 'o', c='gray')
        x0 = tick_min - (tick_max - tick_min) * 0.1  # computed but unused; the x-axis starts at 0
        x1 = tick_max + (tick_max - tick_min) * 0.1
        ylim(0, record_length)
        xlim(0, x1)
        ax1.add_collection(lines)
        ax1.add_collection(lines_picks)
        ax1.set_xticks(ticklocs)
        ax1.set_xticklabels(sta)
        ax1.invert_yaxis()
        ax1.xaxis.tick_top()
        #     ax2 = ax1.twiny()
        #     ax2.xaxis.tick_bottom()
        plt.setp(plt.xticks()[1], rotation=45)
        #xlabel('Station (km)')
        xlabel('channel: ' + channel, fontsize=18)
        ylabel('Record Length (s)', fontsize=18)
        #     plt.title('Section Plot of Event at %s'%(tr.stats.starttime))
        #     plt.tight_layout()

        plt.show()
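
# Hypothetical usage of section_plot (the plotter class name and database
# wiring below are illustrative, not from the original source):
#
#     import glob
#     plotter = Plot(assoc_db=session, tt_stations_db_3D=tt_session)
#     plotter.section_plot(assoc_id=1,
#                          files=glob.glob('waveforms/*.mseed'),
#                          seconds_ahead=5, record_length=100, channel='Z')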
Example #2
plots = False

# assumes: from obspy import read, Stream, and that data_path points at the waveform archive
HNx_st = Stream()
LNx_st = Stream()

# High Broad Band (H??)
HNx_st += read(data_path + "waveforms/USC/*HN*.mseed")
# Long Period (L??)
LNx_st += read(data_path + "waveforms/USC/*LN*.mseed")

if plots:
    HNx_st.plot()
    LNx_st.plot()

#%% Try to get Arias intensity ...
HNx_st = HNx_st.detrend()
LNx_st = LNx_st.detrend()

HNx_arias = get_arias(HNx_st)
LNx_arias = get_arias(LNx_st)

#%% Plot Arias
HNx_arias.plot()
LNx_arias.plot()

# ... Not what I expected ...
# Derp, needed to detrend at the very least

#%% Get a window around an earthquake to look at Arias intensity

# There is definitely an event late in the evening of March 4th
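
#%% For reference: get_arias() is not defined in this snippet. A minimal
# sketch, assuming the standard Arias-intensity definition
# Ia(t) = (pi / (2*g)) * integral of a(t)^2 dt, and that the traces already
# hold acceleration in m/s^2 (the Stream-in/Stream-out shape is inferred from
# the calls above):
import numpy as np
from obspy import Stream, Trace

def get_arias(st, g=9.81):
    out = Stream()
    for tr in st:
        a2 = tr.data.astype(np.float64) ** 2
        # cumulative integral of a(t)^2: running sum scaled by the sample spacing
        ia = np.cumsum(a2) * tr.stats.delta * np.pi / (2.0 * g)
        out += Trace(data=ia, header=tr.stats)
    return out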
Example #3
        'latitude': tr.stats.coordinates['latitude'],
        'longitude': tr.stats.coordinates['longitude'],
        'elevation': tr.stats.coordinates['elevation']
    })
    az12, az21, dist = reviewData.pyproj_distaz(
        tr.stats.coordinates['latitude'],
        tr.stats.coordinates['longitude'],
        evlat,
        evlon,
    )
    arr = model.get_travel_times(source_depth_in_km=dep,
                                 distance_in_degree=kilometer2degrees(dist),
                                 phase_list=['p', 'P'])
    tr.stats.rdist = dist
    tr.stats.ptime = arr[0].time

# Sort by distance from fault.
st.sort(['rdist'])

# Determine picktimes.
picktimes = [event_time + tr.stats.ptime for tr in st]

# Demean data.
st.detrend('demean')

# Open interactive data analysis.
zp = reviewData.InteractivePlot(st, picktimes=picktimes)
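
# For reference: 'model', 'evlat', 'evlon', 'dep' and 'event_time' are defined
# outside this snippet. A typical setup for the travel-time model, assuming
# the iasp91 earth model (the model choice here is an assumption):
from obspy.taup import TauPyModel
from obspy.geodetics import kilometer2degrees

model = TauPyModel(model='iasp91')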
Example #4
def grab_file_data(filepath, scnl, tstart, tend, fill_value=0):
    import obspy
    from obspy import Stream, Trace
    import glob, os, itertools

    stas = []
    chas = []
    nets = []
    locs = []
    for sta in scnl:
        stas.append(sta.split('.')[0])
        chas.append(sta.split('.')[1])
        nets.append(sta.split('.')[2])
        # bug fix: count the '.'-separated fields, not the string's characters
        if len(sta.split('.')) == 4:
            locs.append(sta.split('.')[3])
        else:
            locs.append('')

    st = Stream()

    #if opt.server == 'file':
    if True:  # placeholder kept from the REDPy original, where this chose the data server

        # Generate list of files
        #if opt.server == 'file':
        flist = list(
            itertools.chain.from_iterable(
                glob.iglob(os.path.join(root, "*"))
                for root, dirs, files in os.walk(filepath)))
        # "*" takes the place of wildcard lists, see REDPy documentation

        # Determine which subset of files to load based on start and end times and
        # station name; we'll fully deal with stations below
        flist_sub = []
        for f in flist:
            # Load header only
            stmp = obspy.read(f, headonly=True)
            # Check if station is contained in the stas list
            if stmp[0].stats.station in stas:
                # Check if contains either start or end time
                ststart = stmp[0].stats.starttime
                stend = stmp[0].stats.endtime
                if (ststart <= tstart <= stend or ststart <= tend <= stend
                        or (tstart <= stend and ststart <= tend)):
                    flist_sub.append(f)

        # Fully load data from file
        stmp = Stream()
        for f in flist_sub:
            tmp = obspy.read(f, starttime=tstart, endtime=tend)
            if len(tmp) > 0:
                stmp = stmp.extend(tmp)

        # Filter and merge
        #stmp = stmp.filter('bandpass', freqmin=opt.fmin, freqmax=opt.fmax, corners=2,
        #    zerophase=True)
        #stmp = stmp.taper(0.05,type='hann',max_length=opt.mintrig)
        #for m in range(len(stmp)):
        #    if stmp[m].stats.sampling_rate != opt.samprate:
        #        stmp[m] = stmp[m].resample(opt.samprate)
        stmp = stmp.merge(method=1, fill_value=fill_value)

        # Only grab stations/channels that we want and in order
        netlist = []
        stalist = []
        chalist = []
        loclist = []
        for s in stmp:
            stalist.append(s.stats.station)
            chalist.append(s.stats.channel)
            netlist.append(s.stats.network)
            loclist.append(s.stats.location)

        # Find match of SCNL in header or fill empty
        for n in range(len(stas)):
            for m in range(len(stalist)):
                if (stas[n] in stalist[m] and chas[n] in chalist[m]
                        and nets[n] in netlist[m] and locs[n] in loclist[m]):
                    st = st.append(stmp[m])
            if len(st) == n:
                print("Couldn't find " + stas[n] + '.' + chas[n] + '.' +
                      nets[n] + '.' + locs[n])
                trtmp = Trace()
                trtmp.stats.station = stas[n]
                st = st.append(trtmp.copy())

        if len(st) > 1:
            if fill_value == 0 or fill_value is None:
                st.detrend('demean')
                st.taper(max_percentage=0.01)
            st.merge(fill_value=fill_value)
        st.trim(tstart, tend, pad=0)
        st.detrend('demean')

    print(st)
    return st
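
# Hypothetical usage (the paths and SCNL strings below are illustrative):
from obspy import UTCDateTime

scnl = ['STA1.EHZ.XX', 'STA2.EHZ.XX']  # sta.chan.net[.loc] strings
tstart = UTCDateTime(2017, 1, 1, 0, 0)
tend = tstart + 3600  # one hour of data
st = grab_file_data('/path/to/waveforms', scnl, tstart, tend, fill_value=0)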
Example #5
def main():
    # hyperparameters
    win_size = 30  # window length in seconds
    step_len = 100  # length of each time step (frame size), in samples
    step_stride = step_len // 2  # frames overlap by half their length
    # integer division keeps num_step usable as a range bound under Python 3
    num_step = -(step_len // step_stride - 1) + win_size * 100 // step_stride

    out_class = 'test'
    stream_paths = '/data/WC_AItrain/finetune/Events/%s/*Z.SAC' % out_class
    stream_dir = '/data/WC_AItrain/finetune/Events/%s' % out_class
    output_dir = '/home/zhouyj/Documents/AIDP/data/%s/finetune/ppk_frame100_stride50' % out_class
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    stream_files = sorted(glob.glob(stream_paths))

    done_file = []
    for stream_file in stream_files:

        # one day's data goes into one tfrecord file
        sta, time, aug_idx, chn, _ = stream_file.split('.')
        jday = time[0:7]  # events that happened on the same day
        if [jday, aug_idx] not in done_file:
            done_file.append([jday, aug_idx])
        else:
            continue

        # Write event waveforms and labels in .tfrecords
        output_name = 'frames_' + jday + '_' + aug_idx + ".tfrecords"
        output_path = os.path.join(output_dir, output_name)
        writer = DataWriter(output_path)

        # Load stream
        stz_paths = sorted(
            glob.glob(stream_dir + '/*{}*.{}.BHZ.SAC'.format(jday, aug_idx)))

        # for all streams:
        for i, stz_path in enumerate(stz_paths):
            sta, time, aug_idx, chn, _ = stz_path.split('.')
            stx = '.'.join([sta, time, aug_idx, 'BHE', 'SAC'])
            sty = '.'.join([sta, time, aug_idx, 'BHN', 'SAC'])
            stz = '.'.join([sta, time, aug_idx, 'BHZ', 'SAC'])
            if not (os.path.exists(stx) and os.path.exists(sty)
                    and os.path.exists(stz)):
                print('missing trace!')
                continue
            stream = Stream(traces=[read(stx)[0], read(sty)[0], read(stz)[0]])
            stream = stream.detrend('constant').filter(
                'highpass', freq=1.0).normalize()  #TODO
            # drop bad data: skip if any component is flat zero
            if (stream.max()[0] == 0.0 or stream.max()[1] == 0.0
                    or stream.max()[2] == 0.0):
                print('broken trace!')
                continue

            # stream info
            n_traces = len(stream)
            n_samples = len(stream[0].data)
            n_pts = stream[0].stats.sampling_rate * win_size + 1
            label = 1  # event label (constant 1 in this script)
            p_time, s_time = stream[0].stats.sac.t0, stream[0].stats.sac.t1
            # convert to time_steps and write to TFRecord
            if (n_traces == 3) and (n_pts == n_samples):
                # def input of RNN
                input_array = np.zeros((num_step, n_traces, step_len + 1),
                                       dtype=np.float32)
                # three chn data
                xdata = np.float32(stream[0].data)
                ydata = np.float32(stream[1].data)
                zdata = np.float32(stream[2].data)
                st_data = np.array([xdata, ydata, zdata])
                # convert to time steps
                for j in range(num_step):
                    idx_s = j * step_stride
                    idx_e = idx_s + step_len + 1
                    current_step = st_data[:, idx_s:idx_e]
                    input_array[j, :, :] = current_step

                # Write tfrecords
                writer.write(input_array, step_stride / 100., label, p_time,
                             s_time)
                print("+ Creating tfrecords for ppk time steps {}, idx = {}".
                      format(jday, i))
            else:
                print("Missing waveform for ppk time steps: %s" % (jday))
        writer.close()
Example #6
AC = Stream(traces=[BHE[0], BHN[0], BHZ[0]])
ac = AC.copy()

# ### Remove the instrument responses from the recordings + convert units
#
# We want to deal with meaningful SI-units, so we need to correct the waveforms. <br>
# To convert the ring laser's vertical rotation rate to [nrad/s] units, we can simply use a conversion factor:

RLAS.detrend(type='linear')
RLAS[0].data = RLAS[0].data * 1/6.3191 * 1e-3

# In order to remove the seismometer's response and convert the velocity recordings to acceleration [nm/s²], <br>
# Obspy's <a href=http://docs.obspy.org/packages/autogen/obspy.core.stream.Stream.simulate.html?highlight=simulate#obspy.core.stream.Stream.simulate>simulate</a> function is used. <br>
# It removes the sensitivity and converts to acceleration in one step employing poles & zeros in this case.

# +
AC.detrend(type='linear')
AC.taper(max_percentage=0.05)

paz_sts2 = {'poles': [(-0.0367429 + 0.036754j), (-0.0367429 - 0.036754j)],
            'sensitivity': 0.944019640,
            'zeros': [0j],
            'gain': 1.0}

AC.simulate(paz_remove=paz_sts2, remove_sensitivity=True)
# -

# The resulting traces are trimmed to make sure that start- and endtimes match for all waveforms:

# +
startaim = max([tr.stats.starttime for tr in (AC + RLAS)])
endtaim = min([tr.stats.endtime for tr in (AC + RLAS)])
AC.trim(startaim, endtaim, nearest_sample=True)
RLAS.trim(startaim, endtaim, nearest_sample=True)
# -