Example #1
    def readdisc(self,
                 waveform: str,
                 starttime: datetime,
                 endtime: datetime,
                 resample: bool = True,
                 fill_value: str = 'latest'):
        print('readdisc', starttime, endtime)
        rawdata, channel, wavename = self.wavemeta[waveform]
        ntwk = re.sub('([^.]+)(.*)', '\\1', wavename)
        sttn = re.sub('([^.]+\.)([^.]+)(.*)', '\\2', wavename)

        if starttime is None:
            starttime = channel.start_date

        if endtime is None:
            endtime = channel.end_date


        outwave = rawdata.get_waveforms(
            network=ntwk, station=sttn, location=channel.location_code,
            channel=channel.code, starttime=starttime, endtime=endtime,
            tag="raw_recording")

        if len(outwave) > 0:
            gaps = outwave.get_gaps()

            mergewave = outwave[0]
            for w in outwave[1:]:
                mergewave = mergewave.__add__(w, fill_value=fill_value)

            outwave = mergewave

            rate = round(float(len(outwave.data)) / self.numofsamples)
            if not resample or rate <= 1:
                pass
            elif rate > 16:
                tmp = trace.Trace()
                tmp.data = outwave.data[::rate].copy()
                tmp.meta['delta'] = outwave.meta['delta'] * rate
                tmp.meta['starttime'] = outwave.meta['starttime']
                outwave = tmp  # .decimate(1, True)
                #print(outwave.meta['endtime'], 'new endtime  ')
            elif rate >= 1:
                outwave.decimate(rate)
        else:
            gaps = []  # no data was read, so there is no gap information
            outwave = trace.Trace()
            outwave.data = np.array([np.nan] * self.numofsamples)
            outwave.meta['starttime'] = starttime
            outwave.meta['delta'] = (endtime - starttime) / self.numofsamples

        # print(channel.start_date, channel.end_date,'==================')
        print("reading finished")
        return outwave, wavename, channel.start_date, channel.end_date, gaps
Example #2
    def produceStream(self, filepath):

        time, data = numpy.loadtxt(filepath, unpack=True)
        head, tail = os.path.split(filepath)
        tr = trace.Trace(data)

        try:
            # assuming that the metadata are encoded in the filename following
            # the usual STA.NET.CHAN convention
            parts = tail.split('.')
            tr.stats['network'] = parts[1]
            tr.stats['station'] = parts[0]
            tr.stats['channel'] = parts[2]

            try:
                doc = etree.parse(
                    StringIO(open(self.parameters["stations_file"]).read()))
                ns = {"ns": "http://www.fdsn.org/xml/station/1"}
                tr.stats['latitude'] = self.num(
                    doc.xpath("//ns:Station[@code='" + tr.stats['station'] +
                              "']/ns:Latitude/text()",
                              namespaces=ns)[0])
                tr.stats['longitude'] = self.num(
                    doc.xpath("//ns:Station[@code='" + tr.stats['station'] +
                              "']/ns:Longitude/text()",
                              namespaces=ns)[0])
            except Exception:
                # Fall back to a plain-text station list: skip the header line,
                # then read the station code, longitude and latitude columns.
                with open(self.parameters["stations_file"]) as f:
                    for line in f.readlines()[1:]:
                        cols = line.strip().split(" ")
                        if tr.stats['station'] == cols[0]:
                            tr.stats['latitude'] = float(cols[3])
                            tr.stats['longitude'] = float(cols[2])

        except Exception:
            traceback.print_exc(file=sys.stderr)
            tr.stats['network'] = self.parameters["network"]
            tr.stats['station'] = self.parameters["station"]
            tr.stats['channel'] = self.parameters["channel"]

        tr.stats['starttime'] = time[0]
        delta = time[1] - time[0]
        tr.stats['delta'] = delta  #maybe decimal here
        tr.stats['sampling_rate'] = round(1. / delta, 1)  #maybe decimal here
        if filepath.endswith('.semv'):
            tr.stats['type'] = "velocity"
        if filepath.endswith('.sema'):
            tr.stats['type'] = 'acceleration'
        if filepath.endswith('.semd'):
            tr.stats['type'] = 'displacement'

        st = Stream(traces=[tr])
        return st
Example #3
    def getwaveformresample(self, shift):

        if self.decirate == 1 and shift < 0:
            pass
        elif self.decirate == 16 and shift > 0:
            pass
        elif shift == 0:
            pass
        else:
            decirate = int(self.decirate + shift)

            if decirate < 1:
                decirate = 1
            if decirate > 16:
                decirate = 16

            # decimate both the time axis and the data with the new rate
            self.currenttimes = trace.Trace(
                self.rawtimes).decimate(decirate, True).copy()
            self.currentdata = trace.Trace(
                self.rawdata).decimate(decirate, True).copy()

            gap = self.end - self.start
            self.start = int(float(self.start) * self.decirate / decirate)
            self.end = self.start + gap
            if self.end < 1:
                self.end = 1
            if self.end > len(self.currentdata):
                self.end = len(self.currentdata)


            self.decirate = decirate

            self.times = np.array(self.currenttimes[self.start: self.end])
            self.data = np.array(self.currentdata[self.start: self.end])

        return [self.times,
                self.data]
Example #4
def array2stream(array, sampling_rate, stream_info):
    nchans = array.shape[0]
    npts = array.shape[1]
    traces = []
    for c in range(nchans):
        traces.append(trace.Trace(data=array[c, :],
                                  header={'sampling_rate': sampling_rate,
                                          'station': stream_info['station'],
                                          'network': stream_info['network'],
                                          'channel': stream_info['channels'][c]}))
    return stream.Stream(traces)
def _acorr_trace(signal1, **kwargs):
    """
    Calculate the phase auto-correlation (pac) of signal1.

    For this purpose a time-shifted copy of signal1 is compared to the
    corresponding portion of signal1.

    :type signal1: obspy.core.trace.Trace
    :param signal1: seismic trace
    :param kwargs:
    :return: auto-correlation as trace
    """
    kwargs['mode'] = 'pac'
    kwargs['lags'] = __default_lags_if_not_set(signal1, signal1, **kwargs)

    pac_signal = phasecorr.acorr(signal1.data, **kwargs)

    trace = _tr.Trace(data=pac_signal)

    __writeheader(trace, signal1, **kwargs)
    return trace
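The docstring above describes the expected input and output; the snippet below is a minimal usage sketch, assuming it is run inside the same module so that `_acorr_trace` and its `phasecorr` backend are available (lag defaults are then filled in by `__default_lags_if_not_set`).

import numpy as np
from obspy.core.trace import Trace

# build a short synthetic seismic trace to auto-correlate
tr = Trace(data=np.random.randn(1000),
           header={'sampling_rate': 100.0, 'station': 'TST', 'channel': 'HHZ'})

# phase auto-correlation of the trace, returned as an obspy Trace
ac = _acorr_trace(tr)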
Example #6
    def getWave(self, starttime=None, endtime=None, decirate=None):

        if starttime is None:
            starttime = self.trace.meta['starttime']
        else:
            starttime = UTCDateTime(starttime)

        if endtime is None:
            endtime = self.trace.meta['endtime']
        else:
            endtime = UTCDateTime(endtime)

        if decirate is None:
            decirate = round((endtime - starttime) / 1000)

        if decirate < 1:
            decirate = 1

        if decirate != self.decirate:
            self.decirate = decirate
            self.sampled = False

        if self.sampled:
            pass
        elif decirate > 16:
            print('resampled', decirate)
            self.sampledTrace = trace.Trace()
            self.sampledTrace.data = self.trace.data[::decirate].copy()
            self.sampledTrace.meta['delta'] = self.trace.meta['delta'] * decirate
            self.sampledTrace.meta['starttime'] = self.trace.meta['starttime']
            self.sampledTrace = self.sampledTrace.decimate(1, True)
        else:
            print('resampled', decirate)
            self.sampledTrace = self.trace.copy().decimate(self.decirate, True)

        self.sampled = True

        return self.sampledTrace.slice(starttime, endtime)
Example #7
    def writecache(self, waveform, timewindow):
        #print('writecache', timewindow)
        _, channel, _ = self.wavemeta[waveform]
        if (channel.end_date -
                channel.start_date) / channel.sample_rate < self.cachesize:
            outwave, wavename, start_date, end_date, gaps = \
                self.readdisc(waveform, channel.start_date, channel.end_date, False)

            if timewindow is None:
                timewindow = end_date - start_date

            rate = timewindow * channel.sample_rate / self.numofsamples
            rate = int(rate)

            if rate <= 1:
                pass
            elif rate > 16:
                tmp = trace.Trace()
                tmp.data = outwave.data[::rate].copy()
                tmp.meta['delta'] = outwave.meta['delta'] * rate
                tmp.meta['starttime'] = outwave.meta['starttime']
                outwave = tmp  # .decimate(1, True)
            else:
                outwave.decimate(rate)

            wave = np.vstack(
                (outwave.times() + outwave.meta['starttime'].timestamp,
                 outwave.data))
            wave = np.array(wave).copy()
            self.wavecache[waveform] = (timewindow, wave, wavename,
                                        start_date, end_date, gaps)

            return True
        else:
            return False
def _xcorr_trace(signal1, signal2, **kwargs):
    """
    Calculate the phase cross-correlation (pcc) between signal1 and signal2.

    For this purpose signal2 is shifted in time and compared to the
    corresponding portion of signal1.

    :type signal1: obspy.core.trace.Trace
    :type signal2: obspy.core.trace.Trace
    :param signal1: seismic trace
    :param signal2: seismic trace (wavelet) to correlate with
    :return: cross-correlation as trace
    """

    kwargs['mode'] = 'pcc'
    kwargs['lags'] = __default_lags_if_not_set(signal1, signal2, **kwargs)

    pcc_signal = phasecorr.xcorr(signal1.data, signal2.data, **kwargs)

    trace = _tr.Trace(data=pcc_signal)
    __writeheader(trace, signal1, **kwargs)

    return trace
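Along the same lines, a hedged sketch of how `_xcorr_trace` might be called from within the module; the wavelet here is simply a slice of the signal itself, chosen for illustration.

import numpy as np
from obspy.core.trace import Trace

sig = Trace(data=np.random.randn(3000), header={'sampling_rate': 50.0})
# take a 10 s slice of the signal as the wavelet to correlate against
wav = sig.slice(sig.stats.starttime + 10, sig.stats.starttime + 20)

# phase cross-correlation between the full signal and the wavelet,
# returned as an obspy Trace whose header is filled by __writeheader
pcc = _xcorr_trace(sig, wav)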
Example #9
    os.makedirs(SavePostProcessDir)


dist_list = np.arange(60,141)  #[90,95,100,105,110,115,120,125,130,135]
cat = obspy.read_events(Dir+'/'+model+'/'+'input/CMTSOLUTION')
EventDepth = cat[0].origins[0].depth/1000.
# Loop through seismograms
count = 0
for idist, dist in enumerate(dist_list):
    print('processing distance %d' % dist)
    ReadData = np.loadtxt(Dir + '/' + model + '/' + 'output/stations/UV.D%d.RTZ.ascii' % dist)
    time = ReadData[:,0]
    # Total Stream
    seis = Stream()
    # R Component
    seisR = trace.Trace()
    seisR.ts = np.arange(time[0], time[-1], delta)
    seisR.stats.delta = delta
    seisR.data = np.interp(seisR.ts, time, ReadData[:, 1])
    seisR.stats.channel = 'BAR'
    seis += seisR

    # T Component
    seisT = trace.Trace()
    seisT.ts = np.arange(time[0], time[-1], delta)
    seisT.stats.delta = delta
    seisT.data = np.interp(seisT.ts, time, ReadData[:, 2])
    seisT.stats.channel = 'BAT'
    seis += seisT

    # Z Component
Example #10
                Pref.filter('bandpass',
                            freqmin=fmin,
                            freqmax=fmax,
                            corners=2,
                            zerophase=True)
                SVref.filter('bandpass',
                             freqmin=fmin,
                             freqmax=fmax,
                             corners=2,
                             zerophase=True)
                seis[0].stats['minfreq'] = fmin
                seis[0].stats['maxfreq'] = fmax
                Pref.taper(max_percentage=0.05, type='cosine')
                SVref.taper(max_percentage=0.05, type='cosine')
                RF = trace.Trace()

                if 1 == 1:

                    if flag == 'SV':
                        if filt == 'jgf1':
                            seis[0].jgf1 = dict()
                            out = seis[0].jgf1
                        elif filt == 'jgf2':
                            seis[0].jgf2 = dict()
                            out = seis[0].jgf2
                        elif filt == 'jgf3':
                            seis[0].jgf3 = dict()
                            out = seis[0].jgf3
                        elif filt == 'tff1':
                            seis[0].tff1 = dict()
Example #11
def stream_pxcorr(st, options, comm=None):
    """ 
    Preprocess and correlate traces in a stream

    This is the central function of this module. It takes an obspy stream as
    input and:
        - applies time domain preprocessing
        - Fourier transforms the data
        - applies frequency domain preprocessing
        - multiplies the conjugate spectra (correlation)
        - transforms back into time domain
        - returns a stream with the correlated data

    All of this can be done in parallel on different CPUs communicating via
    the `mpi4py` implementation of MPI. The different processing steps
    are controlled with the dictionary `options`. The following keys are
    required in `options`:
        - combinations: list of tuples that identify the combinations of the\\
            traces in `st` to be correlated
        - lengthToSave: length of the correlated traces in s to return
        - normalize_correlation: Boolean. If True the correlation is\\
            normalized. If False the pure product of the spectra is returned
        - center_correlation: Boolean. If True the location of zero lag time\\
            is in the center of the returned trace. If False the position of\\
            zero lag time is determined by the start times of the traces. If\\
            they are identical it is in the center anyway. If the difference\\
            in start times is larger, the zero lag time is offset accordingly.
        - TDpreProcessing: list controlling the time domain preprocessing
        - FDpreProcessing: list controlling the frequency domain preprocessing

    The items in the lists `TDpreProcessing` and `FDpreProcessing` are
    dictionaries with two keys: `function` containing the function to apply and
    `args` being a dictionary with the arguments for this function. The
    functions in `TDpreProcessing` are applied in their order before the
    Fourier transformation and those in `FDpreProcessing` are applied in their
    order in the Fourier domain.

    :Example:
        ``options = {'TDpreProcessing':[{'function':detrend,
                                'args':{'type':'linear'}},
                               {'function':taper,
                                'args':{'type':'cosTaper',
                                        'p':0.01}},
                               {'function':TDfilter,
                                'args':{'type':'bandpass',
                                        'freqmin':1.,
                                        'freqmax':3.}},
                               {'function':TDnormalization,
                                'args':{'filter':{'type':'bandpass',
                                                 'freqmin':0.5,
                                                 'freqmax':2.},
                                        'windowLength':1.}},
                               {'function':signBitNormalization,
                                'args':{}}
                                 ],
            'FDpreProcessing':[{'function':spectralWhitening,
                                'args':{}},
                               {'function':FDfilter,
                                'args':{'flimit':[0.5, 1., 5., 7.]}}],
            'lengthToSave':20,
            'center_correlation':True,
            'normalize_correlation':True,
            'combinations':[(0,0),(0,1),(0,2),(1,2)]}``
    
    `comm` is an mpi4py communicator that can be passed if already initialized;
    otherwise it is created here.

    :type st: obspy.stream
    :param st: stream with traces to be correlated
    :type options: dictionary
    :param options: control dictionary as described above
    :type comm: mpi4py communicator
    :param comm: communicator if initialized externally
    """

    # initialize MPI
    if not comm:
        comm = MPI.COMM_WORLD
    rank = comm.Get_rank()

    # get parameters of the data
    if rank == 0:
        starttime = []
        npts = []
        for tr in st:
            starttime.append(tr.stats['starttime'])
            npts.append(tr.stats['npts'])
        npts = np.max(np.array(npts))
    else:
        starttime = None
        npts = None
    starttime = comm.bcast(starttime, root=0)
    npts = comm.bcast(npts, root=0)
    # fill matrix with noise data
    A = np.zeros([npts, len(st)])
    if rank == 0:
        for ii in range(len(st)):
            A[0:st[ii].stats['npts'], ii] = st[ii].data
    comm.Bcast([A, MPI.DOUBLE], root=0)
    options.update({
        'starttime': starttime,
        'sampling_rate': st[0].stats['sampling_rate']
    })

    # call pxcorr_for correlation
    A, starttime = pxcorr(comm, A, **options)
    npts = A.shape[0]

    # put trace into a stream
    cst = stream.Stream()
    for ii in range(len(options['combinations'])):
        cstats = combine_stats(st[options['combinations'][ii][0]],
                               st[options['combinations'][ii][1]])
        cstats['starttime'] = starttime[ii]
        cstats['npts'] = npts
        cst.append(trace.Trace(data=A[:, ii], header=cstats))
        cst[-1].stats_tr1 = st[options['combinations'][ii][0]].stats
        cst[-1].stats_tr2 = st[options['combinations'][ii][1]].stats

    return cst
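A minimal call sketch for `stream_pxcorr`, based on the options shown in the docstring example; it assumes the preprocessing helpers (`detrend`, `TDfilter`, `spectralWhitening`) come from the same module and that `mpi4py` is installed (without `mpirun` the code simply runs on a single rank of MPI.COMM_WORLD).

import obspy

st = obspy.read()  # obspy's three demo traces (same station, 100 Hz)

# detrend, TDfilter and spectralWhitening are the module's own preprocessing
# helpers, referenced here exactly as in the docstring example above
options = {'TDpreProcessing': [{'function': detrend,
                                'args': {'type': 'linear'}},
                               {'function': TDfilter,
                                'args': {'type': 'bandpass',
                                         'freqmin': 1., 'freqmax': 3.}}],
           'FDpreProcessing': [{'function': spectralWhitening, 'args': {}}],
           'lengthToSave': 20,
           'center_correlation': True,
           'normalize_correlation': True,
           'combinations': [(0, 1), (0, 2), (1, 2)]}

# one correlation trace per requested combination is returned in a Stream
cst = stream_pxcorr(st, options)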
Example #12
    f = open('/raid2/sc845/MTZ/Synthetics/AxiSEM/' + runs[0] +
             '/simulation.info')
    line = f.readlines()[8]
    print(line)
    val = line.split()
    depth = float(val[0])

    if not os.path.exists(dir):
        os.makedirs(dir)
    for i in range(len(stalist)):  #range(cat.count()):
        print(i)
        seis = Stream()

        #add Z trace #No rotation needed, as along equator
        SHref = trace.Trace()
        tmptimes, SHref.data = np.loadtxt(
            '/raid2/sc845/MTZ/Synthetics/AxiSEM/' + run +
            '/Data_Postprocessing/SEISMOGRAMS/' + stalist[i] +
            '_disp_post_mij_conv0001_Z.dat',
            unpack=True)

        # Setup times etc.
        SHref.ts = np.arange(tmptimes[0], tmptimes[-1], 0.04)
        SHref.stats.delta = 0.04
        SHref.data = np.interp(SHref.ts, tmptimes, SHref.data)
        SHref.stats.channel = 'BAZ'
        seis += SHref

        #add R trace #No rotation needed, as along equator
        SHref = trace.Trace()
def deconvolve_data(dataType, name, sourceName, idx1, idx2, deconType):

    fmax = .1
    fmin = 0.05
    savedist = []

    dir = 'Data/' + str(name) + '/'
    if (dataType == 1):
        dir = dir + 'RealData/'
    if (dataType == 2):
        dir = dir + 'CSEM/'
    if (dataType == 3):
        dir = dir + 'AxiSEM/'

    sourceseis = read(dir + sourceName + '.PICKLE', format='PICKLE')
    Stime = sourceseis[0].stats.traveltimes['S']
    source = sourceseis.select(channel='BHT')[0]
    source.filter('bandpass',
                  freqmin=fmin,
                  freqmax=fmax,
                  corners=2,
                  zerophase=True)
    source.data = np.gradient(source.data, source.stats.delta)

    tshift = sourceseis[0].stats['starttime'] - sourceseis[0].stats['eventtime']
    source.time = source.times()
    source.time = source.time[idx1:idx2]
    source.data = source.data[idx1:idx2]

    norm = 0.3 * np.max(source.data)
    source.taper(max_percentage=0.05, type='cosine')

    seislist = glob.glob(dir + '/*PICKLE')

    plt.figure(figsize=(14, 10))

    plt.subplot(1, 3, 1)
    plt.plot(source.time, source.data / norm, 'k')

    plt.xlim([-25., 150])
    plt.ylim([-(1 / 0.3), (1 / 0.3)])
    plt.title('Source waveform')
    plt.xlabel('Time around S arrival (s)')

    dirLength = len(dir)

    savedir = dir + 'Deconvolved/'

    for i in range(len(seislist)):
        print(i + 1, len(seislist))

        station = seislist[i]
        station = station[dirLength:]          # strip the directory prefix
        station = station[:len(station) - 7]   # strip the '.PICKLE' suffix

        seis = read(seislist[i], format='PICKLE')
        Stime = seis[0].stats.traveltimes['S']
        SHref = seis.select(channel='BHT')[0]
        dist = seis[0].stats['dist']

        SHref.filter('bandpass',
                     freqmin=fmin,
                     freqmax=fmax,
                     corners=2,
                     zerophase=True)
        SHref.data = np.gradient(SHref.data, SHref.stats.delta)

        norm = 0.3 * np.max(abs(SHref.data))

        # Filter seismograms
        SHref = SHref.slice(seis[0].stats['eventtime'] + Stime - 25.,
                            seis[0].stats['eventtime'] + Stime + 150)
        RF = trace.Trace()

        if (deconType == 1):
            #RF.data, fit = itd.water_level_decon(SHref.data, source.data, 3.e-2, source.stats['delta'], 'cosine', .5, 25.)
            pass
        if (deconType == 2):
            RF.data, fit = itd.iterative_deconvolution(SHref.data, source.data,
                                                       200,
                                                       source.stats['delta'],
                                                       'cosine', .5, 25.)

        RF.data = 3. * RF.data / np.max(RF.data)
        RF.times = SHref.times()
        print(RF.data)
        print(fit)
        dist = np.round(dist)
        plt.subplot(1, 3, 2)
        plt.plot(SHref.times() - 25., RF.data + dist, 'k')
        plt.fill_between(SHref.times() - 25.,
                         dist,
                         RF.data + dist,
                         where=RF.data + dist > dist,
                         facecolor='r')
        plt.fill_between(SHref.times() - 25.,
                         dist,
                         RF.data + dist,
                         where=dist > RF.data + dist,
                         facecolor='b')
        plt.subplot(1, 3, 3)
        plt.plot(SHref.times() - 25., SHref.data / norm + dist, 'k')
        plt.fill_between(SHref.times() - 25.,
                         dist,
                         SHref.data / norm + dist,
                         where=SHref.data / norm + dist > dist,
                         facecolor='r')
        plt.fill_between(SHref.times() - 25.,
                         dist,
                         SHref.data / norm + dist,
                         where=dist > SHref.data / norm + dist,
                         facecolor='b')
        savedist.append(dist)

        # Save deconvolved waveforms so they can be accessed again.
        saveName = savedir + station
        RF.write(saveName + '.PICKLE', format='PICKLE')

    plt.subplot(1, 3, 2)
    plt.xlim([-25, 150])
    plt.ylim([min(savedist) - 2, max(savedist) + 2])
    plt.title('Deconvolved waveforms')
    plt.xlabel('Time around S arrival (s)')
    plt.ylabel('Distance (dg)')
    plt.subplot(1, 3, 3)
    plt.xlim([-25, 150])
    plt.ylim([min(savedist) - 2, max(savedist) + 2])
    plt.xlabel('Time around S arrival (s)')
    plt.ylabel('Distance (dg)')

    plt.title('Waveforms ' + savedir)

    plt.savefig('Plots/' + str(name) + '/' + sourceName + '_deconvolved.png',
                bbox_inches='tight')
    plt.savefig('Plots/' + str(name) + '/' + sourceName + '_deconvolved.pdf',
                bbox_inches='tight')
    plt.show()