Example #1
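This fragment begins mid-script; a plausible preamble with the names it relies on (every value below is an assumption, not part of the original):

import glob
import shutil
import sys
from obspy import Stream, read

path = '/data/wav/2008/07'   # hypothetical archive directory ending in /<year>/<month>
year = path[-7:-3]           # '2008'; month is derived from path below
suffix = 'VANARC'            # station/network tag used in the output file names
wav_format = 'MSEED'
final_base = '/data/final/'  # destination root for the merged files
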
    month = path[-2:]
    lifile = glob.glob('*')
    for days in range(1, 32):
        day = ('%02d' % days)
        for hours in range(24):
            hour = ('%02d' % hours)
            wav_files=[wav for wav in lifile if year in wav[0:4] \
            and month in wav[5:7] \
            and day in wav[8:10] \
            and hour in wav[11:13]]
            if not wav_files:
                continue
            else:
                output_name = '%s-%s-%s-%s00_%s.%s' % (year, month, day, hour,
                                                       suffix, wav_format)
                stream_wav = Stream()
                for i in range(len(wav_files)):
                    stream_wav += read(wav_files[i])
                stream_wav.write(output_name, format=("%s" % wav_format))
                stream_wav.clear()
            shutil.move(output_name, ("%s%s/%s/" % (final_base, year, month)))
            print "Processing...%s...Please wait" % output_name

sys.exit()
#				cat_wav=(' '.join(wav_files))
#				cat_command('cat %s > %s-%s-%s-%s
#				os.system(cat_command)
#				print wav_files
#				IPython.embed()
#				2008-07-21-1500_VANARC.MSEED
Example #2
    def requestEventWaveformTraces(self, event):
        """
        Method for requesting event data from waveformLocation. Implement this
        to fetch event waveform data.
        """
        stream = Stream()
        return stream
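The stub above returns an empty Stream; a minimal sketch of one way to implement it, assuming an FDSN waveform source (the client, network, and channel choices here are assumptions, not part of the original):

from obspy import Stream
from obspy.clients.fdsn import Client

class FDSNWaveformLocation:  # hypothetical subclass
    def requestEventWaveformTraces(self, event):
        origin = event.preferred_origin() or event.origins[0]
        client = Client("IRIS")
        stream = Stream()
        try:
            # one minute before to nine minutes after the origin time
            stream = client.get_waveforms("IU", "ANMO", "00", "BHZ",
                                          origin.time - 60, origin.time + 540)
        except Exception:
            pass  # fall back to an empty Stream when no data is available
        return stream
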
Example #3
File: trigger.py  Project: jinwuLi/REDPy
def getData(tstart, tend, opt):

    """
    Download data from files in a folder, from IRIS, or an Earthworm waveserver
    
    A note on SAC/miniSEED files: as this makes no assumptions about the naming scheme of
    your data files, please ensure that your headers contain the correct SCNL information!

    tstart: UTCDateTime of beginning of period of interest
    tend: UTCDateTime of end of period of interest
    opt: Options object describing station/run parameters
    
    Returns ObsPy stream objects, one for cutting and the other for triggering
    """    
    
    nets = opt.network.split(',')
    stas = opt.station.split(',')
    locs = opt.location.split(',')
    chas = opt.channel.split(',')
    
    st = Stream()
    
    if opt.server == 'file':
    
        # Generate list of files
        flist = list(itertools.chain.from_iterable(glob.iglob(os.path.join(
            root, opt.filepattern)) for root, dirs, files in os.walk(opt.searchdir)))
                
        # Determine which subset of files to load based on start and end times and
        # station name; we'll fully deal with stations below
        flist_sub = []
        for f in flist:
            # Load header only
            stmp = obspy.read(f, headonly=True)
            # Check if station is contained in the stas list
            if stmp[0].stats.station in stas:
                # Check if the file's time span overlaps the requested window
                ststart = stmp[0].stats.starttime
                stend = stmp[-1].stats.endtime
                if ststart <= tend and tstart <= stend:
                    flist_sub.append(f)
        
        # Fully load data from file
        stmp = Stream()
        for f in flist_sub:
            tmp = obspy.read(f, starttime=tstart, endtime=tend+opt.maxdt)
            if len(tmp) > 0:
                stmp = stmp.extend(tmp)
    
        # Filter and merge
        stmp = stmp.filter('bandpass', freqmin=opt.fmin, freqmax=opt.fmax, corners=2,
            zerophase=True)
        stmp = stmp.taper(0.05,type='hann',max_length=opt.mintrig)
        for m in range(len(stmp)):
            if stmp[m].stats.sampling_rate != opt.samprate:
                stmp[m] = stmp[m].resample(opt.samprate)
        stmp = stmp.merge(method=1, fill_value=0)
        
        # Only grab stations/channels that we want and in order
        netlist = []
        stalist = []
        chalist = []
        loclist = []
        for s in stmp:
            stalist.append(s.stats.station)
            chalist.append(s.stats.channel)
            netlist.append(s.stats.network)
            loclist.append(s.stats.location)
            
        # Find match of SCNL in header or fill empty
        for n in range(len(stas)):
            for m in range(len(stalist)):
                if (stas[n] in stalist[m] and chas[n] in chalist[m] and nets[n] in
                    netlist[m] and locs[n] in loclist[m]):
                    st = st.append(stmp[m])
            if len(st) == n:
                print("Couldn't find "+stas[n]+'.'+chas[n]+'.'+nets[n]+'.'+locs[n])
                trtmp = Trace()
                trtmp.stats.sampling_rate = opt.samprate
                trtmp.stats.station = stas[n]
                st = st.append(trtmp.copy())
    
    else:   
     
        if '.' not in opt.server:
            client = Client(opt.server)
        else:
            client = EWClient(opt.server, opt.port)
        
        for n in range(len(stas)):
            try:
                stmp = client.get_waveforms(nets[n], stas[n], locs[n], chas[n],
                        tstart, tend+opt.maxdt)
                for m in range(len(stmp)):
                    stmp[m].data = np.where(stmp[m].data == -2**31, 0, stmp[m].data) # replace -2**31 (Winston NaN token) w 0
                stmp = stmp.filter('bandpass', freqmin=opt.fmin, freqmax=opt.fmax,
                    corners=2, zerophase=True)
                stmp = stmp.taper(0.05,type='hann',max_length=opt.mintrig)
                for m in range(len(stmp)):
                    if stmp[m].stats.sampling_rate != opt.samprate:
                        stmp[m] = stmp[m].resample(opt.samprate)
                stmp = stmp.merge(method=1, fill_value=0)
            except (obspy.clients.fdsn.header.FDSNException):
                try: # try again
                    stmp = client.get_waveforms(nets[n], stas[n], locs[n], chas[n],
                            tstart, tend+opt.maxdt)
                    for m in range(len(stmp)):
                        stmp[m].data = np.where(stmp[m].data == -2**31, 0, stmp[m].data) # replace -2**31 (Winston NaN token) w 0
                    stmp = stmp.filter('bandpass', freqmin=opt.fmin, freqmax=opt.fmax,
                        corners=2, zerophase=True)
                    stmp = stmp.taper(0.05,type='hann',max_length=opt.mintrig)
                    for m in range(len(stmp)):
                        if stmp[m].stats.sampling_rate != opt.samprate:
                            stmp[m] = stmp[m].resample(opt.samprate)
                    stmp = stmp.merge(method=1, fill_value=0)
                except (obspy.clients.fdsn.header.FDSNException):
                    print('No data found for {0}.{1}'.format(stas[n],nets[n]))
                    trtmp = Trace()
                    trtmp.stats.sampling_rate = opt.samprate
                    trtmp.stats.station = stas[n]
                    stmp = Stream().extend([trtmp.copy()])
                                            
            # Last check for length; catches problem with empty waveserver
            if len(stmp) != 1:
                print('No data found for {0}.{1}'.format(stas[n],nets[n]))
                trtmp = Trace()
                trtmp.stats.sampling_rate = opt.samprate
                trtmp.stats.station = stas[n]
                stmp = Stream().extend([trtmp.copy()])
                
            st.extend(stmp.copy()) 
    
    # Edit 'start' time if using offset option
    if opt.maxdt:
        dts = np.array(opt.offset.split(','), dtype=float)
        for n, tr in enumerate(st):
            tr.stats.starttime = tr.stats.starttime-dts[n]
    
    st = st.trim(starttime=tstart, endtime=tend, pad=True, fill_value=0)
    stC = st.copy()
    
    return st, stC
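A short usage sketch, assuming `opt` is a configured REDPy Options object (the times below are placeholders):

from obspy import UTCDateTime

tstart = UTCDateTime("2014-08-01T00:00:00")
tend = tstart + 3600
st, stC = getData(tstart, tend, opt)   # st for cutting, stC for triggering
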
Example #4
def removeGaps(self, min_gap, max_gap, verbose=False):

    """
    Returns the Stream object without trace gaps/overlaps.
    :param min_gap: All gaps smaller than this value will be omitted. The
          value is assumed to be in seconds. Defaults to None.
    :param max_gap: All gaps larger than this value will be omitted. The
          value is assumed to be in seconds. Defaults to None.
    :param verbose: print removed traces to stdout. Defaults to False.
    """

    new=Stream()
    self.sort()
    gap_list = []

    # since one trace would otherwise be left over
    if len(self) != 0:
      self.append(self[0])

    for _i in range(1, len(self.traces)):
       # skip traces with different network, station, location or channel
       if self.traces[_i - 1].id != self.traces[_i].id:
          new.append(self.traces[_i])
          continue
       # different sampling rates should always result in a gap or overlap
       if self.traces[_i - 1].stats.delta == self.traces[_i].stats.delta:
          flag = True
       else:
          flag = False
       stats = self.traces[_i - 1].stats
       stime = stats['endtime']
       etime = self.traces[_i].stats['starttime']
       delta = etime.timestamp - stime.timestamp

       # Check that any overlap is not larger than the trace coverage
       if delta < 0:
             temp = self.traces[_i].stats['endtime'].timestamp - \
                    etime.timestamp
             if (delta * -1) > temp:
                 delta = -1 * temp
       # Check gap/overlap criteria
       if min_gap and delta < min_gap:
             new.append(self.traces[_i - 1])
             continue
       if max_gap and delta > max_gap:
             new.append(self.traces[_i - 1])
             continue
       # Number of missing samples
       nsamples = int(round(abs(delta) * stats['sampling_rate']))
       # skip if the gap is exactly one sample (delta == 1 / sampling_rate)
       if flag and nsamples == 1:
             new.append(self.traces[_i - 1])
             continue
       elif delta > 0:
             nsamples -= 1
       else:
             nsamples += 1

       gap_list.append([_i,stats['network'], stats['station'],
                             stats['location'], stats['channel'],
                             stime, etime, delta, nsamples])
       if verbose == "True" or verbose == "TRUE" or verbose == "true":
          print  "Removed because of gap: ",stats['network'],stats['station'],stats['channel'],stime,etime,delta, nsamples

    return new
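A usage sketch, assuming this method has been attached to an obspy Stream subclass (the file name is a placeholder):

from obspy import read

st = read("example.mseed")   # hypothetical file with gappy traces
clean = st.removeGaps(min_gap=None, max_gap=None, verbose=True)
print(len(st), len(clean))
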
Example #5
def calculate_gf(
        config: Optional[Config] = None) -> Union[List[np.ndarray], List[Stream]]:
    """
    Compute displacements in cm in the up, radial (outward), and transverse
    (clockwise) directions produced by different seismic sources.

    :param config: the configuration of calculating the Green's function, defaults to None
    :type config: Optional[Config], optional
    :return: if npt == 2 or 1, return a 2D list in which each row holds the static
        displacements; otherwise, return a list of Streams, where each stream keeps
        the order of the GFs as in FK and the streams follow the order of
        receiver_distance.
    :rtype: Union[List[np.ndarray], List[Stream]]
    """
    # * firstly, we calculate the travel time and ray parameter for vp and vs
    t0_vp: np.ndarray
    td_vp: np.ndarray
    p0_vp: np.ndarray
    pd_vp: np.ndarray
    t0_vs: np.ndarray
    td_vs: np.ndarray
    p0_vs: np.ndarray
    pd_vs: np.ndarray
    t0_vp, td_vp, p0_vp, pd_vp = taup(
        config.src_layer, config.rcv_layer, config.model.th.astype(np.float64),
        config.model.vp.astype(np.float64),
        config.receiver_distance.astype(np.float64))
    t0_vs, td_vs, p0_vs, pd_vs = taup(
        config.src_layer, config.rcv_layer, config.model.th.astype(np.float64),
        config.model.vs.astype(np.float64),
        config.receiver_distance.astype(np.float64))
    # * extract information from taup
    # first arrival array
    t0 = t0_vp
    # calculate the ray angle at the source
    dn, pa, sa = [
        np.zeros(len(config.receiver_distance), dtype=float)
        for index in range(3)
    ]
    # for each receiver, calculate pa and sa
    for irec in range(len(config.receiver_distance)):
        if t0_vp[irec] < td_vp[irec] and p0_vp[irec] < 1. / 7:
            pa[irec] = config.model.vp[config.src_layer] * p0_vp[irec]
            dn[irec] = 1
        else:
            pa[irec] = config.model.vp[config.src_layer] * pd_vp[irec]
            dn[irec] = -1
        pa[irec] = np.rad2deg(
            np.arctan2(pa[irec], dn[irec] * np.sqrt(np.abs(1 - pa[irec]**2))))

        if t0_vs[irec] < td_vs[irec] and p0_vs[irec] < 1. / 4:
            sa[irec] = config.model.vs[config.src_layer] * p0_vs[irec]
            dn[irec] = 1
        else:
            sa[irec] = config.model.vs[config.src_layer] * pd_vs[irec]
            dn[irec] = -1
        sa[irec] = np.rad2deg(
            np.arctan2(sa[irec], dn[irec] * np.sqrt(np.abs(1 - sa[irec]**2))))

    # * if we should flip the model
    # get a copy of the earth model
    # ! note, we directly use model, src_layer, rcv_layer, as they might be flipped and we don't want to
    # ! change the config
    model = copy(config.model)
    src_layer = config.src_layer
    rcv_layer = config.rcv_layer
    flip: bool = False
    if rcv_layer > src_layer:
        flip = True
        src_layer = len(model.th) - src_layer
        rcv_layer = len(model.th) - rcv_layer
        # reverse the velocity model
        model.model_values = model.model_values[::-1, :]
    # for vs, it might be 0 in the sea, we assign a small value here
    model.model_values[:, 1][model.model_values[:, 1] < EPSILON] = EPSILON
    # get the source and receiver depth difference, the vs at source
    hs: float = 0.
    for index, value in enumerate(model.th):
        if rcv_layer <= index < src_layer:
            hs += value
    vs_source = model.vs[src_layer]

    # * calculate the si matrix representing source
    si = calculate_gf_source(config.source.srcType, model, flip, src_layer)

    # * initialize some parameters for waveform integration
    dynamic = True
    nfft2 = int(config.npt / 2)
    wc1 = int(config.filter[0] * config.npt * config.dt) + 1
    wc2 = int(config.filter[1] * config.npt * config.dt) + 1
    if config.npt == 1:
        # it will never happen!
        dynamic = False
        nfft2 = 1
        wc1 = 1
    dw = np.pi * 2 / (config.npt * config.dt)
    sigma = config.suppression_sigma * dw / (np.pi * 2)
    wc = nfft2 * (1. - config.taper)
    if wc < 1:
        wc = 1
    else:
        wc = int(wc)
    # ! note, we will use taper, pmin, pmax, dk, sigma later
    taper = np.pi / (nfft2 - wc + 1)
    if wc2 > wc:
        wc2 = wc
    if wc1 > wc2:
        wc1 = wc2
    kc = config.kmax / hs
    pmin = config.pmin / vs_source
    pmax = config.pmax / vs_source
    xmax = np.max([hs, np.max(config.receiver_distance)])
    # update t0 based on number of samples before first arrival
    t0 -= config.samples_before_first_arrival * config.dt
    dk = config.dk * np.pi / xmax
    filter_const = dk / (np.pi * 2)
    # * main loop, calculate the green's function
    # * call the function from the cython module
    sum_waveform: np.ndarray = waveform_integration(
        model, config, src_layer, rcv_layer, taper, pmin, pmax, dk, nfft2, dw,
        kc, flip, filter_const, dynamic, wc1, wc2, t0, wc, si, sigma)
    # * with sum_waveform, we can apply the inverse fft acting as the frequency integration
    dt_smth = config.dt / config.smth
    nfft_smth = int(config.npt * config.smth)
    dfac = np.exp(sigma * dt_smth)
    if nfft2 == 1:
        static_return_list = []
        for irec in range(len(config.receiver_distance)):
            static_return_list.append(np.real(sum_waveform[irec, :, 0]))
        return static_return_list
    fac = np.array([dfac**index for index in range(nfft_smth)])
    nCom_mapper = {"dc": 9, "sf": 6, "ep": 3}
    nCom = nCom_mapper[config.source.srcType]

    # * do the irfft
    gf_streamall = []
    # get correct t0 value
    for irec in range(len(config.receiver_distance)):
        stream_irec = Stream()
        for icom in range(nCom):
            waveform_freqdomain = np.hstack([
                sum_waveform[irec, icom, :],
                np.zeros(int(nfft_smth / 2) - nfft2, dtype=complex)
            ])
            gf_data = irfft(waveform_freqdomain, nfft_smth) / dt_smth
            # now we apply the frequency correction
            fac_icom = fac * np.exp(sigma * t0[irec])
            gf_data = gf_data * fac_icom
            stats_sac = {
                "delta": dt_smth,
                "b": t0_vp[irec],
                "e": nfft_smth * dt_smth + t0_vp[irec],
                "o": 0.0,
                "dist": config.receiver_distance[irec],
                "t1":
                t0_vp[irec] + config.samples_before_first_arrival * config.dt,
                "t2": t0_vs[irec],
                "user1": pa[irec],
                "user2": sa[irec],
                "npts": nfft_smth,
            }
            trace_irec_icom = Trace(data=gf_data, header={"sac": stats_sac})
            trace_irec_icom.stats.starttime += t0_vp[irec]
            trace_irec_icom.stats.delta = dt_smth
            stream_irec += trace_irec_icom
        gf_streamall.append(stream_irec)

    # * here the green's function is gf_streamall
    return gf_streamall
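The zero-padding and irfft above perform the frequency integration; a minimal self-contained sketch of the same pattern (all values here are assumptions), assuming scipy's irfft:

import numpy as np
from scipy.fft import irfft

nfft2, smth, dt = 256, 2, 0.1
nfft_smth = int(2 * nfft2 * smth)          # npt * smth, with npt = 2 * nfft2
dt_smth = dt / smth
spec = np.zeros(nfft2, dtype=complex)
spec[10] = 1.0                             # a single spectral line
padded = np.hstack([spec, np.zeros(nfft_smth // 2 - nfft2, dtype=complex)])
data = irfft(padded, nfft_smth) / dt_smth  # time series sampled at dt_smth
print(data.shape)                          # (1024,)
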
Example #6
def Collecting(queue, queue2):
    #Stats header information initialization
    stationId = '01'
    stationName = 'Unknown'
    stationAddress = 'Unknown'
    longitude = 0.0
    latitude = 0.0
    elevation = 0.0
    dcShift = 0
    oldDCShift = 0

    # Check whether the user has already entered station information: if yes, go
    # straight into 24-hour live plotting; if not, create the initial
    # station-information input window
    if not os.path.exists('Station Information.txt'):
        app = wx.App(False)
        frame_5 = MyFrame4(None, wx.ID_ANY, "")
        app.SetTopWindow(frame_5)
        frame_5.Center()
        frame_5.Show()
        app.MainLoop()


    # Once the user has entered the station information it is saved into a txt
    # file, which is then read line by line and parsed to extract the header
    # information
    file = open("Station Information.txt", "r")
    informationArray = file.readlines()

    for line in informationArray:
        if "Station ID" in line:
            stationId = line[line.find(":") + 1:line.find("\n")]
        if "Station Name" in line:
            stationName = line[line.find(":") + 1:line.find("\n")]
        if "Station Address" in line:
            stationAddress = line[line.find(":") + 1:line.find("\n")]
        if "Longitude" in line:
            longitude = line[line.find(":") + 1:line.find("\n")]
        if "Latitude" in line:
            latitude = line[line.find(":") + 1:line.find("\n")]
        if "Elevation" in line:
            elevation = line[line.find(":") + 1:line.find("\n")]
        if "DCShift" in line:
            dcShift = int(line[line.find(":") + 1::])

            oldDCShift = int(line[line.find(":") + 1::])
    file.close()

    #initializing further required variables
    mode = "None"
    currentMode = "24Hour"
    graphHeightConst = 2500  #distance between each 1 hour plot on the 24 hour plot
    totalHoursConst = 23  #used to decrement the hour so that once the plot reaches the end of 24 hours the plot is cleared and plotting starts from the top
    skipConst = 1  #currently not used, but in place to skip reading values coming in from the TC1 - eg. if it is 2, then it will read every second value
    count = 0
    lastHour = datetime.time(datetime.now()).hour
    hasHourChanged = False
    plotLimit = graphHeightConst * 7
    goldenNumber = 32750  #the center line of each plot, where it oscillates  - used to fix y axis according to this (32750 - graphHeightConstant which gives lower limit + graphHeightConstant * 25 (or how many ever hours gives upper limit))
    upperLim = 36000  #the top limit of each plot
    lowerLim = 28000  #bottom limit of each plot
    plotClear = False

    # stores every value read from the TC1 during the current hour; written to a
    # SAC file when the hour changes
    hourSeismicData = np.array([])
    # stores 18 values read from the TC1, sent to the plotting process, then
    # cleared for the next 18 values
    tempSeismicData = np.array([])

    #hourMillisecondData = np.array([], dtype = np.float64)
    tempMillisecond = np.array([], dtype=np.float64)

    serialNumber = None
    serialPort = None

    #Returns the serialPort that the TC1 is connected to
    serialPort = getSerialPort()

    #This while loop ensures user has collected the TC1 before continuing
    while serialPort is None:
        easygui.msgbox("Please connect TC1 Seismometer", title="Warning")
        serialPort = getSerialPort()

    serialPort = serial.Serial(serialPort)
    serialPort.flushInput()
    serialPort.flushOutput()

    #create a stats object that holds all the station information retrieved from the txt file
    stats = initializeHeader(stationId, stationName, stationAddress, longitude,
                             latitude, elevation)

    #The following two lines create the secondary options window
    secondaryWindowProcess = Thread(target=secondaryWindow, args=(queue2, ))
    secondaryWindowProcess.start()

    queue.put("Start Plotting Process")

    while True:
        try:

            #Checks whether the user has changed the view selection in the options window from 24 hour to 1 hour or has increased or decreased the graphShift
            if (queue2.empty() == False):
                readingQueue2 = queue2.get()
                if readingQueue2 == "24-Hour-Plot":
                    mode = "24-Hour-Plot"
                    currentMode = "24Hour"
                    totalHoursConst = 23
                    tempSeismicData = np.array([])
                    tempMillisecond = np.array([])

                if readingQueue2 == "1-Hour-Plot":
                    mode = "1-Hour-Plot"
                    currentMode = "1Hour"
                    tempSeismicData = np.array([])
                    tempMillisecond = np.array([])
                if readingQueue2 == "UP":
                    tempSeismicData = np.array([])
                    tempMillisecond = np.array([])
                    dcShift += 100

                    for line in fileinput.input('Station Information.txt',
                                                inplace=True):
                        print(line.replace('DCShift:' + str(oldDCShift),
                                           'DCShift:' + str(dcShift)), end='')
                    oldDCShift = dcShift

                if readingQueue2 == "DOWN":
                    tempSeismicData = np.array([])
                    tempMillisecond = np.array([])
                    dcShift -= 100

                    # Every time the user changes the graph shift, the value
                    # against the DCShift header in Station Information.txt is updated
                    for line in fileinput.input('Station Information.txt',
                                                inplace=True):
                        print(line.replace('DCShift:' + str(oldDCShift),
                                           'DCShift:' + str(dcShift)), end='')
                    oldDCShift = dcShift

            #Read from the TC1 seismometer
            reading = int(serialPort.readline())

            if currentMode == "24Hour":
                #Depending on the hour and viewMode which is 24 or 1 hour plotting, the data value that is read is translated to the appropriate height
                data = [
                    int(reading + (graphHeightConst * totalHoursConst)) +
                    dcShift
                ]

            if currentMode == "1Hour":
                minute = (datetime.time(datetime.now())).minute
                if minute < 5:
                    data = [int(reading + (graphHeightConst * 11)) + dcShift]
                if minute < 10 and minute >= 5:
                    data = [int(reading + (graphHeightConst * 10)) + dcShift]
                if minute < 15 and minute >= 10:
                    data = [int(reading + (graphHeightConst * 9)) + dcShift]
                if minute < 20 and minute >= 15:
                    data = [int(reading + (graphHeightConst * 8)) + dcShift]
                if minute < 25 and minute >= 20:
                    data = [int(reading + (graphHeightConst * 7)) + dcShift]
                if minute < 30 and minute >= 25:
                    data = [int(reading + (graphHeightConst * 6)) + dcShift]
                if minute < 35 and minute >= 30:
                    data = [int(reading + (graphHeightConst * 5)) + dcShift]
                if minute < 40 and minute >= 35:
                    data = [int(reading + (graphHeightConst * 4)) + dcShift]
                if minute < 45 and minute >= 40:
                    data = [int(reading + (graphHeightConst * 3)) + dcShift]
                if minute < 50 and minute >= 45:
                    data = [int(reading + (graphHeightConst * 2)) + dcShift]
                if minute < 55 and minute >= 50:
                    data = [int(reading + (graphHeightConst * 1)) + dcShift]
                if minute < 60 and minute >= 55:
                    data = [int(reading + (graphHeightConst * 0)) + dcShift]

            timeNow = datetime.time(datetime.now())
            time = timeNow.minute + (timeNow.second +
                                     timeNow.microsecond / 1000000.0) / 60.0
            hour = timeNow.hour
            plotClear = False

            if (hour != lastHour):
                ## Every time the hour changes, the following code saves an hour-long SAC file

                lastHour = hour
                currentTime = str(datetime.utcnow())
                now2 = currentTime.split(' ', 1)
                now3 = now2[1].split(':', 1)
                now3 = int(now3[0]) - 1
                if (now3 == -1):
                    now3 = 23

                stats['endtime'] = UTCDateTime()
                stats['npts'] = len(hourSeismicData)

                st = Stream([Trace(data=hourSeismicData, header=stats)])

                sacdateAndTime = str(stats['starttime']).split('T')

                sacdate = sacdateAndTime[0].split('-')
                sactime = sacdateAndTime[1].split(':')
                sacyear = sacdate[0][2:]
                sacmonth = sacdate[1]
                sacday = sacdate[2]
                sachour = sactime[0]
                sacminute = sactime[1]
                fileName = str(sacyear + sacmonth + sacday + sachour +
                               sacminute + stats['station'] + ".sac")
                st.write(fileName, format='SAC')
                stats = initializeHeader(stationId, stationName,
                                         stationAddress, longitude, latitude,
                                         elevation)
                hourSeismicData = np.array([])

                ##Uploads SAC file right after creating it

                contentType = "application/octet-stream"  #image/png
                c = pycurl.Curl()
                c.setopt(
                    c.URL,
                    'https://nzseis.phy.auckland.ac.nz/pyjamaseis/upload/')
                c.setopt(c.HTTPHEADER, [
                    'Authorization:' +
                    'Basic %s' % base64.b64encode(b"kofi:pyjamaseis").decode()
                ])
                c.setopt(c.HTTPPOST,
                         [("payload", (c.FORM_FILE, fileNaame,
                                       c.FORM_CONTENTTYPE, contentType)),
                          ("mode", "sac")])

                try:
                    c.perform()
                    c.close()
                except pycurl.error as error:
                    errno, errstr = error.args
                    print('An error occurred:', errstr)

                totalHoursConst = totalHoursConst - 1
                if (totalHoursConst == -1):
                    plotClear = True
                    totalHoursConst = 23

                hasHourChanged = True

            if ((count % skipConst == 0) or hasHourChanged):
                if ((tempSeismicData.size >= 18) or hasHourChanged):

                    ## After every 18 values are read from the TC1 seismometer, the
                    ## array holding them, along with the tempMillisecond array of the
                    ## exact read times, is put on the queue for the plotting process
                    queue.put([
                        tempSeismicData, tempMillisecond, hasHourChanged,
                        plotClear, mode
                    ])
                    mode = "None"
                    #the arrays are cleared
                    tempSeismicData = np.array([])
                    tempMillisecond = np.array([])
                    hasHourChanged = False

                else:
                    if currentMode == "1Hour":

                        tempSeismicData = np.append(tempSeismicData, data)

                        # map the time into its 5-minute band on the x axis
                        tempMillisecond = np.append(tempMillisecond, time % 5)

                        hourSeismicData = np.append(hourSeismicData, reading)
                    else:
                        tempSeismicData = np.append(tempSeismicData, data)
                        tempMillisecond = np.append(tempMillisecond, time)
                        hourSeismicData = np.append(hourSeismicData, reading)

            count += 1

        except ValueError as e:
            print(e)
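initializeHeader is not shown in this example; a plausible sketch, assuming it builds a plain-dict Trace header from the parsed station fields (every value below is an assumption):

from obspy import UTCDateTime

def initializeHeader(stationId, stationName, stationAddress,
                     longitude, latitude, elevation):
    # Sketch only: Trace(header=...) accepts a plain dict, and the caller
    # later sets 'endtime' and 'npts' on it directly.
    return {
        'network': 'RS',                  # assumed network code
        'station': str(stationName)[:5],  # SAC keeps station names short
        'location': str(stationId),
        'sampling_rate': 18.78,           # assumed TC1 sample rate
        'starttime': UTCDateTime(),
    }
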
Example #7
def relcalstack(st1,
                st2,
                calib_file,
                window_len,
                overlap_frac=0.5,
                smooth=0,
                save_data=True):
    """
    Method for relative calibration of sensors using a sensor with known
    transfer function

    :param st1: Stream or Trace object, (known)
    :param st2: Stream or Trace object, (unknown)
    :type calib_file: str
    :param calib_file: file name of calibration file containing the PAZ of the
        known instrument in GSE2 standard.
    :type window_len: float
    :param window_len: length of sliding window in seconds
    :type overlap_frac: float
    :param overlap_frac: fraction of overlap, defaults to fifty percent (0.5)
    :type smooth: float
    :param smooth: variable that defines if the Konno-Ohmachi taper is used or
        not. Default = 0 -> no taper; the value generally used in geopsy is
        smooth = 40.
    :type save_data: bool
    :param save_data: Whether or not to save the result to a file. If True, two
        output files will be created:
        * The new response in station_name.window_length.resp
        * The ref response in station_name.refResp
        Defaults to True
    :returns: frequency, amplitude and phase spectrum

    Implemented after relcalstack.c by M. Ohrnberger and J. Wassermann.
    """
    # transform given trace objects to streams
    if isinstance(st1, Trace):
        st1 = Stream([st1])
    if isinstance(st2, Trace):
        st2 = Stream([st2])
    # check if sampling rate and trace length is the same
    if st1[0].stats.npts != st2[0].stats.npts:
        msg = "Traces don't have the same length!"
        raise ValueError(msg)
    elif st1[0].stats.sampling_rate != st2[0].stats.sampling_rate:
        msg = "Traces don't have the same sampling rate!"
        raise ValueError(msg)
    else:
        ndat1 = st1[0].stats.npts
        sampfreq = st1[0].stats.sampling_rate

    # read waveforms
    tr1 = st1[0].data.astype(np.float64)
    tr2 = st2[0].data.astype(np.float64)

    # get window length, nfft and frequency step
    ndat = int(window_len * sampfreq)
    nfft = nextpow2(ndat)

    # read calib file and calculate response function
    gg, _freq = _calcresp(calib_file, nfft, sampfreq)

    # calculate number of windows and overlap
    nwin = int(np.floor((ndat1 - nfft) / (nfft / 2)) + 1)
    noverlap = int(nfft * overlap_frac)

    auto, _freq, _t = \
        spectral_helper(tr1, tr1, NFFT=nfft, Fs=sampfreq, noverlap=noverlap)
    cross, freq, _t = \
        spectral_helper(tr2, tr1, NFFT=nfft, Fs=sampfreq, noverlap=noverlap)

    res = (cross / auto).sum(axis=1) * gg

    # The first item might be zero. Problems with phase calculations.
    res = res[1:]
    freq = freq[1:]
    gg = gg[1:]

    res /= nwin
    # apply Konno-Ohmachi smoothing taper if chosen
    if smooth > 0:
        # Write in one matrix for performance reasons.
        spectra = np.empty((2, len(res.real)))
        spectra[0] = res.real
        spectra[1] = res.imag
        new_spectra = \
            konnoOhmachiSmoothing(spectra, freq, bandwidth=smooth, count=1,
                                  max_memory_usage=1024, normalize=True)
        res.real = new_spectra[0]
        res.imag = new_spectra[1]

    amp = np.abs(res)
    # include phase unwrapping
    phase = np.unwrap(np.angle(res))  # + 2.0 * np.pi
    ra = np.abs(gg)
    rpha = np.unwrap(np.angle(gg))

    if save_data:
        trans_new = (st2[0].stats.station + "." + st2[0].stats.channel + "." +
                     str(window_len) + ".resp")
        trans_ref = st1[0].stats.station + ".refResp"
        # Create empty array for easy saving
        temp = np.empty((len(freq), 3))
        temp[:, 0] = freq
        temp[:, 1] = amp
        temp[:, 2] = phase
        np.savetxt(trans_new, temp, fmt="%.10f")
        temp[:, 1] = ra
        temp[:, 2] = rpha
        np.savetxt(trans_ref, temp, fmt="%.10f")

    return freq, amp, phase
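A usage sketch, assuming st1 holds the reference (known) sensor, st2 the unknown one, and a GSE2 calibration file for the known instrument (the file names are placeholders):

from obspy import read

st1 = read("known_sensor.mseed")
st2 = read("unknown_sensor.mseed")
freq, amp, phase = relcalstack(st1, st2, "known_sensor.gse2_paz",
                               window_len=3600, overlap_frac=0.5,
                               smooth=40, save_data=False)
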
Example #8
elif data_type == 'smc':

    # Read in one component of acceleration.
    os.chdir(
        '/Users/tnye/PROJECTS/Duration/data/events/nc216859/ground_motion/smc')
    files_grabbed = []
    for file in glob.glob('*a.smc'):
        files_grabbed.append(file)

    st_list = []
    for file in files_grabbed:
        data = read_data(file)
        st_list.append(data)
    flat_list = [item for sublist in st_list for item in sublist]

    st = Stream([])
    for trace in flat_list:
        st.append(trace)

elif data_type == 'v2':

    # Read in one component of acceleration.
    os.chdir(
        '/Users/tnye/PROJECTS/Duration/data/events/nc72282711/ground_motion/v2'
    )
    files_grabbed = []
    for file in glob.glob('*.V2'):
        files_grabbed.append(file)

    st_list = []
    for file in files_grabbed:
Example #9
    def plot(self):
        stream = Stream()
        stream += self.BW_obs.BW_stream.traces[0]
        # stream += self.BW_obs.S_stream.traces[1]
        # stream += self.BW_obs.S_stream.traces[2]
        #
        #
        stream += self.BW_syn.BW_stream.traces[0]
        # stream += self.BW_syn.S_stream.traces[1]
        # stream += self.BW_syn.S_stream.traces[2]
        #
        stream.plot()

        fig = plt.figure(figsize=(10, 12))
        delta = self.BW_obs.P_stream.traces[0].meta.delta
        p_time_array = np.arange(len(self.BW_obs.P_stream.traces[0].data)) * delta
        s_time_array = np.arange(len(self.BW_obs.S_stream.traces[0].data)) * delta

        start_P = int((self.BW_obs.start_P.timestamp - self.or_time.timestamp - 10) / delta)
        end_P = int((self.BW_obs.start_P.timestamp - self.or_time.timestamp + 30) / delta)

        start_S = int((self.BW_obs.start_S.timestamp - self.or_time.timestamp - 20) / delta)
        end_S = int((self.BW_obs.start_S.timestamp - self.or_time.timestamp + 100) / delta)

        ax1 = plt.subplot2grid((5, 1), (0, 0))
        plt.plot(p_time_array[start_P:end_P], self.BW_obs.P_stream.traces[0].data[start_P:end_P], 'b', label='Observed')
        plt.plot(p_time_array[start_P:end_P], self.BW_syn.P_stream.traces[0].data[start_P:end_P], 'r',
                 label='Synthetic')
        ymin, ymax = ax1.get_ylim()
        xmin, xmax = ax1.get_xlim()
        plt.text(xmax - 5, ymax / 1.7, "P-Z", fontsize=20, color='b')
        ax1.ticklabel_format(style="sci", axis='y', scilimits=(-2, 2))
        ax1.tick_params(axis='x', labelsize=18)
        ax1.tick_params(axis='y', labelsize=18)
        plt.tight_layout()
        plt.legend(loc='lower left', fontsize=15)

        ax2 = plt.subplot2grid((5, 1), (1, 0))
        plt.plot(p_time_array[start_P:end_P], self.BW_obs.P_stream.traces[1].data[start_P:end_P], 'b')
        plt.plot(p_time_array[start_P:end_P], self.BW_syn.P_stream.traces[1].data[start_P:end_P], 'r')
        ymin, ymax = ax2.get_ylim()
        xmin, xmax = ax2.get_xlim()
        plt.text(xmax - 5, ymax / 1.7, "P-R", fontsize=20, color='b')
        ax2.ticklabel_format(style="sci", axis='y', scilimits=(-2, 2))
        ax2.tick_params(axis='x', labelsize=18)
        ax2.tick_params(axis='y', labelsize=18)
        plt.tight_layout()

        ax3 = plt.subplot2grid((5, 1), (2, 0))
        plt.plot(s_time_array[start_S:end_S], self.BW_obs.S_stream.traces[0].data[start_S:end_S], 'b')
        plt.plot(s_time_array[start_S:end_S], self.BW_syn.S_stream.traces[0].data[start_S:end_S], 'r')
        ymin, ymax = ax3.get_ylim()
        xmin, xmax = ax3.get_xlim()
        plt.text(xmax - 10, ymax / 1.7, "S-Z", fontsize=20, color='b')
        ax3.ticklabel_format(style="sci", axis='y', scilimits=(-2, 2))
        ax3.tick_params(axis='x', labelsize=18)
        ax3.tick_params(axis='y', labelsize=18)
        plt.tight_layout()

        ax4 = plt.subplot2grid((5, 1), (3, 0))
        plt.plot(s_time_array[start_S:end_S], self.BW_obs.S_stream.traces[1].data[start_S:end_S], 'b')
        plt.plot(s_time_array[start_S:end_S], self.BW_syn.S_stream.traces[1].data[start_S:end_S], 'r')
        ymin, ymax = ax4.get_ylim()
        xmin, xmax = ax4.get_xlim()
        plt.text(xmax - 10, ymax / 1.7, "S-R", fontsize=20, color='b')
        ax4.ticklabel_format(style="sci", axis='y', scilimits=(-2, 2))
        ax4.tick_params(axis='x', labelsize=18)
        ax4.tick_params(axis='y', labelsize=18)
        plt.tight_layout()

        ax5 = plt.subplot2grid((5, 1), (4, 0))
        plt.plot(s_time_array[start_S:end_S], self.BW_obs.S_stream.traces[2].data[start_S:end_S], 'b')
        plt.plot(s_time_array[start_S:end_S], self.BW_syn.S_stream.traces[2].data[start_S:end_S], 'r')
        ymin, ymax = ax5.get_ylim()
        xmin, xmax = ax5.get_xlim()
        plt.text(xmax - 10, ymax / 1.9, "S-T", fontsize=20, color='b')
        ax5.ticklabel_format(style="sci", axis='y', scilimits=(-2, 2))
        ax5.tick_params(axis='x', labelsize=18)
        ax5.tick_params(axis='y', labelsize=18)
        ax5.set_xlabel(self.BW_obs.start_P.strftime('From P arrival: %Y-%m-%dT%H:%M:%S + [sec]'), fontsize=18)
        plt.tight_layout()
        # plt.show()
        plt.savefig(self.prior['save_dir'] + '/plots/%s_%i.png' % (self.prior['save_name'], self.i))
        # plt.show()
        plt.close()
Example #10
    def Get_bw_windows(self,
                       stream,
                       UNKNOWN_1,
                       UNKNOWN_2,
                       or_time,
                       Full_P_shift=None,
                       Full_S_shift=None,
                       MANUAL=False):
        ## You can use EITHER MANUAL mode or not:
        """   if MANUAL = False:
                UNKNOWN_1 = EPI
                UNKNOWN_2 = DEPTH
              if MANUAL = True:
                UNKNOWN_1 = tt_P
                UNKNOWN_2 = tt_S

            Full_P_shift = extra shift applied to synthetic, so should be None if Observed
            Full_S_shift = extra shift applied to synthetic, so should be None if Observed

        """

        self.dt = stream.traces[0].stats.delta
        self.original = stream.copy()
        or_time_sec = or_time.timestamp

        if not MANUAL:
            epi = UNKNOWN_1
            depth = UNKNOWN_2
            tt_P = self.get_P(epi, depth)
            tt_S = self.get_S(epi, depth)
            self.start_P = obspy.UTCDateTime(or_time_sec + tt_P - self.Pre_P)
            self.or_P_len = int(
                (self.start_P - or_time) / stream.traces[0].stats.delta)
            self.start_S = obspy.UTCDateTime(or_time_sec + tt_S - self.Pre_S)
            self.or_S_len = int(
                (self.start_S - or_time) / stream.traces[0].stats.delta)
            # print(self.start_P)
            # print(self.start_S)

            end_P = obspy.UTCDateTime(or_time_sec + tt_P + self.Post_P)
            end_S = obspy.UTCDateTime(or_time_sec + tt_S + self.Post_S)

        else:
            tt_P = UNKNOWN_1
            tt_S = UNKNOWN_2
            self.start_P = obspy.UTCDateTime(tt_P.timestamp - self.Pre_P)
            self.or_P_len = int(
                (self.start_P - or_time) / stream.traces[0].stats.delta)
            self.start_S = obspy.UTCDateTime(tt_S.timestamp - self.Pre_S)
            self.or_S_len = int(
                (self.start_S - or_time) / stream.traces[0].stats.delta)

            end_P = obspy.UTCDateTime(tt_P.timestamp + self.Post_P)
            end_S = obspy.UTCDateTime(tt_S.timestamp + self.Post_S)

        if Full_P_shift is None:
            Full_P_shift = 0
        else:
            Full_P_shift = Full_P_shift * self.dt

        if Full_S_shift is None:
            Full_S_shift = 0
        else:
            Full_S_shift = Full_S_shift * self.dt

        self.S_original = self.original.copy()
        self.P_original = self.original.copy()
        if self.Taper:
            # taper so that the P and S waves themselves are not affected
            self.P_original.taper(0.025, 'hann', self.start_P - or_time - 10)
            self.S_original.taper(0.025, 'hann', self.start_P - or_time - 10)

            # CHECK IN OBSPY FUNCTION IF TAPER IS GOOD:
            # import matplotlib.pylab as plt
            # plt.close()
            # x = np.arange(len(taper))
            # plt.plot(x, taper, 'r', label='Hann Taper')
            # plt.plot(3606, 1, 'bx', label='P-arrival')
            # plt.plot(6806, 1, 'kx', label='S-arrival')
            # plt.legend()
            # plt.show()

        if self.zero_phase:
            self.P_original.filter('highpass',
                                   freq=1. / (end_P - self.start_P),
                                   zerophase=True,
                                   corners=self.Order)
            self.P_original.filter('highpass',
                                   freq=1. / (end_S - self.start_S),
                                   zerophase=True,
                                   corners=self.Order)
            self.S_original.filter('highpass',
                                   freq=1. / (end_P - self.start_P),
                                   zerophase=True,
                                   corners=self.Order)
            self.S_original.filter('highpass',
                                   freq=1. / (end_S - self.start_S),
                                   zerophase=True,
                                   corners=self.Order)
        else:
            self.P_original.filter('highpass',
                                   freq=1. / (end_P - self.start_P),
                                   corners=self.Order)
            self.P_original.filter('highpass',
                                   freq=1. / (end_S - self.start_S),
                                   corners=self.Order)
            self.S_original.filter('highpass',
                                   freq=1. / (end_P - self.start_P),
                                   corners=self.Order)
            self.S_original.filter('highpass',
                                   freq=1. / (end_S - self.start_S),
                                   corners=self.Order)

        self.P_original = self.Filter(self.P_original,
                                      HP=self.P_HP,
                                      LP=self.P_LP)
        self.S_original = self.Filter(self.S_original,
                                      HP=self.S_HP,
                                      LP=self.S_LP)

        wlen_seconds = self.Taper_Len
        zero_len = self.Zero_len
        # wlen = int(wlen_seconds / self.dt)

        P_stream = Stream()
        S_stream = Stream()
        for i in range(0, len(stream.traces)):
            trace_P = self.P_original.traces[i].copy()
            trace_S = self.S_original.traces[i].copy()
            dt = trace_P.meta.delta

            P_trace = Trace.slice(trace_P,
                                  self.start_P - wlen_seconds + Full_P_shift,
                                  end_P + wlen_seconds + Full_P_shift)
            self.P_len = len(P_trace)
            npts_p = self.P_len + 2 * zero_len
            start_p = dt * zero_len
            S_trace = Trace.slice(trace_S,
                                  self.start_S - wlen_seconds + Full_S_shift,
                                  end_S + wlen_seconds + Full_S_shift)
            self.S_len = len(S_trace)
            npts_s = self.S_len + 2 * zero_len
            start_s = dt * zero_len

            if i == 2:  # for the third trace, only an S window is built
                total_s_trace = Trace(np.zeros(npts_s),
                                      header={
                                          "starttime":
                                          self.start_S - start_s -
                                          wlen_seconds + Full_S_shift,
                                          'delta':
                                          trace_S.stats.delta,
                                          "station":
                                          trace_S.stats.station,
                                          "network":
                                          trace_S.stats.network,
                                          "location":
                                          trace_S.stats.location,
                                          "channel":
                                          trace_S.stats.channel
                                      }).__add__(S_trace,
                                                 method=0,
                                                 interpolation_samples=0,
                                                 fill_value=S_trace.data,
                                                 sanity_checks=True)

            else:
                total_p_trace = Trace(np.zeros(npts_p),
                                      header={
                                          "starttime":
                                          self.start_P - start_p -
                                          wlen_seconds + Full_P_shift,
                                          'delta':
                                          trace_P.stats.delta,
                                          "station":
                                          trace_P.stats.station,
                                          "network":
                                          trace_P.stats.network,
                                          "location":
                                          trace_P.stats.location,
                                          "channel":
                                          trace_P.stats.channel
                                      }).__add__(P_trace,
                                                 method=0,
                                                 interpolation_samples=0,
                                                 fill_value=P_trace.data,
                                                 sanity_checks=True)
                total_s_trace = Trace(np.zeros(npts_s),
                                      header={
                                          "starttime":
                                          self.start_S - start_s -
                                          wlen_seconds + Full_S_shift,
                                          'delta':
                                          trace_S.stats.delta,
                                          "station":
                                          trace_S.stats.station,
                                          "network":
                                          trace_S.stats.network,
                                          "location":
                                          trace_S.stats.location,
                                          "channel":
                                          trace_S.stats.channel
                                      }).__add__(S_trace,
                                                 method=0,
                                                 interpolation_samples=0,
                                                 fill_value=S_trace.data,
                                                 sanity_checks=True)
                P_stream.append(total_p_trace)
            S_stream.append(total_s_trace)
            self.S_stream = S_stream
            self.P_stream = P_stream
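A usage sketch, assuming `bw` is an instance carrying the windowing attributes referenced above (Pre_P, Post_P, Pre_S, Post_S, Taper, zero_phase, Order, the band-pass corners, Taper_Len and Zero_len); the argument names are from the docstring:

# Travel times computed from epicentral distance and depth:
bw.Get_bw_windows(stream, epi, depth, or_time, MANUAL=False)
# Or with manually picked P and S arrival times (UTCDateTime objects):
bw.Get_bw_windows(stream, tt_P, tt_S, or_time, MANUAL=True)
print(bw.P_stream)
print(bw.S_stream)
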
Example #11
def invert_raw():
    ######################################
    # Binary
    ######################################
    if mode == "Binary":
        catch_files = []
        files = glob.glob(file_path + "*")
        for file in files:
            catch = re.findall(
                ".*[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}_[0-9]{2}_[0-9]{2}\.[0-9]{6}",
                file)
            if len(catch) > 0:
                catch_files.append(file)

        ######################################
        # Freq file
        ######################################
        freq_file = glob.glob(file_path + "*_freq")
        if len(freq_file) > 1:
            print("warning: more than one freq file in folder")
        if len(freq_file) == 0:
            print("warning: no freq file discovered, using " + str(sampling_freq))
        else:
            content = "40.000000"
            with open(freq_file[0], "r") as f:
                content = f.read()
            sampling_freq = float(content)
            print "Sampling used : " + str(sampling_freq)
        files_nb = len(catch_files)
        file_offset = 1
        for catch_file in catch_files:
            print(catch_file)
            print("File nb: " + str(file_offset) + "/" + str(files_nb))
            date = UTCDateTime(
                re.findall(
                    ".*([0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}_[0-9]{2}_[0-9]{2}\.[0-9]{6})",
                    catch_file)[0])
            rawdata = numpy.fromfile(catch_file, numpy.int32)
            ######################################
            # Plot plotly file
            ######################################
            # Add acoustic values to the graph
            #data_line = graph.Scattergl(x=[date + i/sampling_freq for i in range(0,len(rawdata))],
            #                          y=rawdata,
            #                          name="counts",
            #                          line=dict(color='blue', width=2),
            #                          mode='lines')

            #plotlydata = [data_line]

            #layout = graph.Layout(title="Plot",
            #                      xaxis=dict(title='Date', titlefont=dict(size=18)),
            #                      yaxis=dict(title='Counts', titlefont=dict(size=18)),
            #                      hovermode='closest'
            #                      )

            #plotly.plot({'data': plotlydata, 'layout': layout},
            #            filename=catch_file + ".html",
            #            auto_open=False)

            ######################################
            # Create SAC file
            ######################################

            # Fill header info
            stats = Stats()
            stats.sampling_rate = sampling_freq
            stats.network = "test"
            stats.station = 0
            stats.starttime = date
            stats.sac = dict()

            # Save data into a Stream object
            trace = Trace()
            trace.stats = stats
            trace.data = rawdata
            stream = Stream(traces=[trace])

            # Save stream object
            stream.write(catch_file + ".sac", format='SAC')
            stream.write(catch_file + ".mseed", format='MSEED')
            file_offset = file_offset + 1
    else:
        ######################################
        # Text
        ######################################
        #filename = "tool_invert_raw/1553771378.490936"
        #date = UTCDateTime(1553771378.490936)
        # text
        #f = open(filename, 'r')
        #rawdata = numpy.array(f.read().rstrip('\n').split('\n'))
        #f.close()
        # binary
        ######################################
        # Plot plotly file
        ######################################

        # Add acoustic values to the graph
        data_line = graph.Scattergl(
            x=[date + i / sampling_freq for i in range(0, len(rawdata))],
            y=rawdata,
            name="counts",
            line=dict(color='blue', width=2),
            mode='lines')

        plotlydata = [data_line]

        layout = graph.Layout(title="Plot",
                              xaxis=dict(title='Date',
                                         titlefont=dict(size=18)),
                              yaxis=dict(title='Counts',
                                         titlefont=dict(size=18)),
                              hovermode='closest')

        plotly.plot({
            'data': plotlydata,
            'layout': layout
        },
                    filename=filename + ".html",
                    auto_open=False)

        ######################################
        # Create SAC file
        ######################################

        # Fill header info
        stats = Stats()
        stats.sampling_rate = sampling_freq
        stats.network = "test"
        stats.station = 0
        stats.starttime = date
        stats.sac = dict()

        # Save data into a Stream object
        trace = Trace()
        trace.stats = stats
        trace.data = rawdata
        stream = Stream(traces=[trace])

        # Save stream object
        stream.write(filename + ".sac", format='SAC')
Example #12
# # Gudkova -> P = 1.8, S = 2
# model_Pangles = [27.9,56.6,19.4,25.9,30.5,27.5,26.7]
# model_Sangles = [24,58,19.6,23.4,26.3,24,22.9]

model_ls = ['NewGudkova']
# Gudkova -> P = 1.8, S = 2
model_Pangles = [25.9]
model_Sangles = [23.4]

n = 0
for a in model_Pangles:
    print('P:' + model_ls[n])
    stP = stP_og.copy()
    hhQ,hhL = rotate(stP[1].data, stP[2].data, a)
    t1, t2, t3 = Trace(stP[0].data, header=headerP), Trace(hhQ, header=headerP), Trace(hhL, header=headerP)
    stP_LQ = Stream(traces=[t1,t2,t3])
    stP_LQ[0].stats.component = 'T'
    stP_LQ[1].stats.component = 'Q'
    stP_LQ[2].stats.component = 'L'

    stP_LQ.plot(equal_scale=True);
    n += 1
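rotate is not defined in this excerpt; a plausible sketch, assuming a plain 2-D rotation of the two input components by the incidence angle in degrees:

import numpy as np

def rotate(c1, c2, angle_deg):
    # Sketch only: rotate the (c1, c2) component pair into (Q, L)
    a = np.deg2rad(angle_deg)
    hhQ = np.cos(a) * c1 - np.sin(a) * c2
    hhL = np.sin(a) * c1 + np.cos(a) * c2
    return hhQ, hhL
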


#S-wave
n = 0
for a in model_Sangles:
    print('S:' + model_ls[n])
    stS = stS_og.copy()
    hhQ,hhL = rotate(stS[1].data, stS[2].data, a)
    t1, t2, t3 = Trace(stS[0].data, header=headerS), Trace(hhQ, header=headerS), Trace(hhL, header=headerS)
Example #13
def process(stream,
            amp_min,
            amp_max,
            window_vmin,
            taper_type,
            taper_percentage,
            taper_side,
            get_corners,
            sn_ratio,
            max_low_freq,
            min_high_freq,
            default_low_frequency,
            default_high_frequency,
            filters,
            baseline_correct,
            event_time=None,
            epi_dist=None):
    """
    Processes an acceleration trace following the step-by-step process
    described in the Rennolet et al paper
    (https://doi.org/10.1193/101916EQS175DP)
    This function completes
    Step 4 through Step 11 from the paper.

    - Amplitude
    - Windowing
    - Identify corner frequencies for filtering
        - Split signal and noise windows
        - Taper, compute FFT, Konno Ohmachi smoothing
        - Signal-to-noise ratio
    - Filter
    - Polynomial baseline correction

    This processing should be performed on data with physical units (acc,
    vel, etc).

    Along with the stream, this function requires two pieces of metadata
    (origin time of the event and epicentral distance to the station) to
    complete all processing steps.

    To process with defaults use process_config with the amptools default
    config dictionary.

    Args:
        stream (obspy.core.stream.Stream): Stream for one station.
        amp_min (float): Lower amplitude limit for step 4.
        amp_max (float): Upper amplitude limit for step 4.
        window_vmin (float): Minimum velocity for step 5.
        taper_type (str): Type of taper for step 6.
        taper_percentage (float): Maximum taper percentage for step 6.
        taper_side (str): Sides to taper for step 6.
        get_corners (bool): Whether to complete step 8 or use defaults.
        sn_ratio (float): Signal to noise ratio.
        max_low_freq (float): Maximum low corner frequency allowed.
        min_high_freq (float): Minimum high corner frequency allowed.
        default_low_frequency (float): Default minimum frequency used in
                place of corners calculated from step 8.
        default_high_frequency (float): Default maximum frequency used in
                place of corners calculated from step 8.
        filters (list): List of filters (dict) with type (str), corners (int),
                and zerophase (bool) defined.
        baseline_correct (bool): Whether or not to complete step 11.
        event_time (UTCDateTime): Origin time of the event. Default is None.
        epi_dist (float): Epicentral distance. Default is None.

    Returns:
        obspy.core.stream.Stream: Processed stream.
    """
    horizontals = []
    for trace in stream:
        if trace.stats['channel'].upper().find('Z') < 0:
            horizontals += [trace.stats['channel']]
    horizontal_corners = {}
    low_freqs = []
    high_freqs = []

    processed_streams = Stream()

    for trace in stream:
        trace_copy = trace.copy()
        # The stats need to be set even if the process checks fail
        trace_copy = _update_params(trace_copy, 'amplitude', {
            'min': amp_min,
            'max': amp_max
        })
        trace_copy = _update_params(trace_copy, 'window',
                                    {'vmin': window_vmin})
        trace_copy = _update_params(
            trace_copy, 'taper', {
                'type': taper_type,
                'side': taper_side,
                'max_percentage': taper_percentage
            })
        trace_copy = _update_params(
            trace_copy, 'corners', {
                'get_dynamically': get_corners,
                'sn_ratio': sn_ratio,
                'max_low_freq': max_low_freq,
                'min_high_freq': min_high_freq,
                'default_low_frequency': default_low_frequency,
                'default_high_frequency': default_high_frequency
            })
        trace_copy = _update_params(trace_copy, 'filters', [])
        trace_copy = _update_params(trace_copy, 'baseline_correct',
                                    baseline_correct)
        trace_copy.stats['passed_tests'] = True

        # Check amplitude
        if not check_max_amplitude(trace_copy, amp_min, amp_max):
            trace_copy.stats['passed_tests'] = False
            err_msg = ('Processing: Trace maximum amplitude is not '
                       'within the acceptable range: %r to %r. Skipping '
                       'processing for trace: %r' % (amp_min, amp_max, trace))
            trace_copy = _update_comments(trace_copy, err_msg)
            processed_streams.append(trace_copy)
            continue

        # Windowing
        if event_time is not None and epi_dist is not None:
            trace_trim = trim_total_window(trace_copy,
                                           event_time,
                                           epi_dist,
                                           vmin=window_vmin)
            windowed = True
            # Check if windowing failed
            if (trace_trim == -1):
                trace_copy.stats['passed_tests'] = False
                err_msg = ('Processing: Invalid time windowing. The start '
                           'time of the trace is after the calculated '
                           'end time. Skipping processing for trace: %r'
                           % trace)
                trace_copy = _update_comments(trace_copy, err_msg)
                processed_streams.append(trace_copy)
                continue
        else:
            trace_copy.stats['passed_tests'] = False
            err_msg = ('Processing: No windowing test performed. Missing '
                       'event time and/or epicentral distance information to '
                       'perform calculation.')
            trace_copy = _update_comments(trace_copy, err_msg)
            trace_copy = _update_params(trace_copy, 'window',
                                        {'vmin': window_vmin})
            trace_trim = trace_copy
            # Corners cannot be calculated dynamically without windowing
            warnings.warn('Missing event information. Continuing processing '
                          'without windowing. Default frequencies will be '
                          'used for filtering.')
            windowed = False

        # Taper
        trace_tap = taper(trace_trim)

        # Find corner frequencies
        if get_corners and windowed:
            corners = get_corner_frequencies(trace_tap, event_time, epi_dist,
                                             sn_ratio, max_low_freq,
                                             min_high_freq, taper_type,
                                             taper_percentage, taper_side)
            if (corners[0] < 0 or corners[1] < 0):
                trace_tap.stats['passed_tests'] = False
                dynamic = False
                high_freq = default_high_frequency
                low_freq = default_low_frequency

                if corners == [-1, -1]:
                    err_msg = ('Not enough pre-event noise to calculate '
                               'signal to noise ratio. Skipping processing '
                               'for trace: %r' % (trace))
                elif corners == [-2, -2]:
                    err_msg = ('Signal-to-noise ratio too low to find corner '
                               'frequencies, skipping processing for '
                               'trace: %r' % (trace))
                else:
                    err_msg = ('Did not find any corner frequencies within '
                               'the valid bandwidth. Skipping processing for '
                               'trace: %r' % (trace))
                trace_tap = _update_comments(trace_tap, err_msg)

            else:
                low_freq = corners[0]
                high_freq = corners[1]
                dynamic = True
        else:
            high_freq = default_high_frequency
            low_freq = default_low_frequency
            dynamic = False

        corner_params = {
            'get_dynamically': dynamic,
            'default_high_frequency': high_freq,
            'sn_ratio': sn_ratio,
            'default_low_frequency': low_freq,
            'max_low_freq': max_low_freq,
            'min_high_freq': min_high_freq
        }

        channel = trace.stats.channel
        if channel in horizontals:
            low_freqs += [low_freq]
            high_freqs += [high_freq]
            horizontal_corners[channel] = {}
            horizontal_corners[channel]['corner_params'] = corner_params
            horizontal_corners[channel]['trace'] = trace_tap.copy()
        else:
            if trace_tap.stats['passed_tests'] is False:
                trace_tap = _update_params(trace_tap, 'corners', corner_params)
                processed_streams.append(trace_tap)
                continue

            trace_tap = _update_params(trace_tap, 'corners', corner_params)

            # Filter
            trace_filt = trace_tap
            for filter_dict in filters:
                filter_type = filter_dict['type']
                corners = filter_dict['corners']
                zerophase = filter_dict['zerophase']
                trace_filt = filter_waveform(trace_filt, filter_type,
                                             high_freq, low_freq, zerophase,
                                             corners)
            # Correct baseline
            if baseline_correct:
                trace_cor = correct_baseline(trace_filt)
            else:
                trace_cor = _update_params(trace_filt, 'baseline_correct',
                                           False)
            processed_streams.append(trace_cor)

    if len(low_freqs) > 0:
        low_horizontal = np.sort(low_freqs)[0]
        high_horizontal = np.sort(high_freqs)[-1]
        for channel in horizontal_corners:
            params = horizontal_corners[channel]['corner_params']
            params['default_high_frequency'] = high_horizontal
            params['default_low_frequency'] = low_horizontal
            trace = horizontal_corners[channel]['trace']

            if trace.stats['passed_tests'] is False:
                trace = _update_params(trace, 'corners', params)
                processed_streams.append(trace)
                continue
            trace_trim = _update_params(trace, 'corners', params)
            # Filter
            trace_filt = trace_trim
            for filter_dict in filters:
                filter_type = filter_dict['type']
                corners = filter_dict['corners']
                zerophase = filter_dict['zerophase']
                trace_filt = filter_waveform(trace_filt, filter_type,
                                             high_horizontal, low_horizontal,
                                             zerophase, corners)
            # Correct baseline
            if baseline_correct:
                trace_cor = correct_baseline(trace_filt)
            else:
                trace_cor = _update_params(trace_filt, 'baseline_correct',
                                           False)
            processed_streams.append(trace_cor)
    return processed_streams
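
# A hypothetical usage sketch (values and variable names are illustrative,
# not part of the original module): process one station's stream with
# explicit parameters.
#
# processed = process(stream,
#                     amp_min=1e-5, amp_max=2.0,
#                     window_vmin=1.0,
#                     taper_type='hann', taper_percentage=0.05,
#                     taper_side='both',
#                     get_corners=True, sn_ratio=3.0,
#                     max_low_freq=0.1, min_high_freq=5.0,
#                     default_low_frequency=0.1,
#                     default_high_frequency=20.0,
#                     filters=[{'type': 'highpass', 'corners': 2,
#                               'zerophase': True}],
#                     baseline_correct=True,
#                     event_time=origin_time, epi_dist=25.0)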
Example #14
def group_channels(streams):
    """Consolidate streams for the same event.

    Checks to see if there are channels for one station in different
    streams, and groups them into one stream. Then streams are checked for
    duplicate channels (traces).

    Args:
        streams (list): List of Stream objects.

    Returns:
        list: List of Stream objects.
    """
    # Return the original stream if there is only one
    if len(streams) <= 1:
        return streams

    # Gather all the traces
    trace_list = []
    for stream in streams:
        for trace in stream:
            if trace.stats.network == '' or str(trace.stats.network) == 'nan':
                trace.stats.network = 'ZZ'
            if str(trace.stats.location) == 'nan':
                trace.stats.location = ''
            if trace.stats.location == '':
                trace.stats.location = '--'
            trace_list += [trace]

    # Create a list of duplicate traces and event matches
    duplicate_list = []
    match_list = []
    for idx1, trace1 in enumerate(trace_list):
        matches = []
        network = trace1.stats['network']
        station = trace1.stats['station']
        starttime = trace1.stats['starttime']
        endtime = trace1.stats['endtime']
        channel = trace1.stats['channel']
        location = trace1.stats['location']
        if 'units' in trace1.stats.standard:
            units = trace1.stats.standard['units']
        else:
            units = ''
        if 'process_level' in trace1.stats.standard:
            process_level = trace1.stats.standard['process_level']
        else:
            process_level = ''
        data = np.asarray(trace1.data)
        for idx2, trace2 in enumerate(trace_list):
            if idx1 != idx2 and idx1 not in duplicate_list:
                event_match = False
                duplicate = False
                if data.shape == trace2.data.shape:
                    try:
                        same_data = ((data == np.asarray(trace2.data)).all())
                    except AttributeError:
                        same_data = (data == np.asarray(trace2.data))
                else:
                    same_data = False
                if 'units' in trace2.stats.standard:
                    units2 = trace2.stats.standard['units']
                else:
                    units2 = ''
                if 'process_level' in trace2.stats.standard:
                    process_level2 = trace2.stats.standard['process_level']
                else:
                    process_level2 = ''
                if (network == trace2.stats['network']
                        and station == trace2.stats['station']
                        and starttime == trace2.stats['starttime']
                        and endtime == trace2.stats['endtime']
                        and channel == trace2.stats['channel']
                        and location == trace2.stats['location']
                        and units == units2 and process_level == process_level2
                        and same_data):
                    duplicate = True
                elif (network == trace2.stats['network']
                      and station == trace2.stats['station']
                      and starttime == trace2.stats['starttime']
                      and location == trace2.stats['location']
                      and units == units2 and process_level == process_level2):
                    event_match = True
                if duplicate:
                    duplicate_list += [idx2]
                if event_match:
                    matches += [idx2]
        match_list += [matches]

    # Create an updated list of streams
    streams = []
    for idx, matches in enumerate(match_list):
        stream = Stream()
        grouped = False
        for match_idx in matches:
            if match_idx not in duplicate_list:
                if idx not in duplicate_list:
                    stream.append(trace_list[match_idx])
                    duplicate_list += [match_idx]
                    grouped = True
        if grouped:
            stream.append(trace_list[idx])
            duplicate_list += [idx]
            streams += [stream]

    # Check for ungrouped traces
    for idx, trace in enumerate(trace_list):
        if idx not in duplicate_list:
            stream = Stream()
            streams += [stream.append(trace)]
            logging.warning('One channel stream:\n%s' % (stream))

    # Check for streams with more than three channels
    for stream in streams:
        if len(stream) > 3:
            raise GMProcessException('Stream with more than 3 channels:\n%s.' %
                                     (stream))

    return streams
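
# A hypothetical usage sketch (stream names are illustrative): consolidate
# per-channel streams from one event into per-station streams.
#
# station_streams = group_channels([st_hne, st_hnn, st_hnz])
# for st in station_streams:
#     print(st)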
Example #15
)

mdl = "tmp"
download = False

if download:
    mdl = MassDownloader(providers=["SCEDC"])
    mdl.download(domain,
                 restrictions,
                 mseed_storage=data_path + "waveforms/USC/",
                 stationxml_storage=data_path + "stations/")

#%% Read in a couple test waveforms
plots = False

HNx_st = Stream()
LNx_st = Stream()

# High sample rate accelerometer channels (HN?)
HNx_st += read(data_path + "waveforms/USC/*HN*.mseed")
# Long period accelerometer channels (LN?)
LNx_st += read(data_path + "waveforms/USC/*LN*.mseed")

if plots:
    HNx_st.plot()
    LNx_st.plot()

#%% Try to get Arias intensity ...
HNx_st = HNx_st.detrend()
LNx_st = LNx_st.detrend()
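
# A minimal sketch (added here, not part of the original script) of Arias
# intensity, Ia = pi / (2 g) * integral of a(t)**2 dt, assuming the trace has
# already been corrected to acceleration in m/s**2:
import numpy as np

def arias_intensity(trace, g=9.81):
    """Return the Arias intensity (m/s) of an acceleration trace."""
    return np.pi / (2.0 * g) * np.trapz(trace.data ** 2, dx=trace.stats.delta)

# Hypothetical usage on the detrended streams above:
# print(arias_intensity(HNx_st[0]))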
Example #16
class drumPlot(Client):
    _file = 'tr.mseed'  # 'traces.mseed'
    _traces = Stream()
    _inv = read_inventory("metadata/Braskem_metadata.xml")
    _rtSft = rtSft
    _lastData = UTCDateTime.now()
    _appTrace = Stream()
    _drTrace = Stream()
    _drHTrace = Stream()
    _rTWindow = rTWindow
    _tEnd = UTCDateTime.now()
    _tNow = UTCDateTime.now()
    _rtRunning = False
    _hyRunning = False
    _saving = False
    _elRunning = False
    _status = {}
    _elab = {}
    _elabHyst = {}

    def statusCalc(self):
        for tr in self._traces:
            id = tr.get_id()
            l = int(UTCDateTime.now() - tr.stats['endtime'])
            station = id.split('.')[1]
            self._status[station] = {}
            self._status[station]["Noise Level"] = "---"
            self._status[station]["Latency"] = str(l) + 's'
            self._status[station]["Voltage"] = "---"
            self._status[station]["Color"] = "#FF0000"
        with open('geophone_network_status.json', 'w') as fp:
            json.dump(self._status, fp)
            fp.close()
        sftp.put('geophone_network_status.json', 'uploads/RT/' + 'geophone_network_status.json')

    def singleStatusCalc(self, tr):
        id = tr.get_id()
        station = id.split('.')[1]
        l = int(UTCDateTime.now() - tr.stats['endtime'])
        self._status[station] = {}
        self._status[station]["Noise Level"] = "---"
        self._status[station]["Latency"] = str(l) + 's'
        self._status[station]["Voltage"] = "---"
        self._status[station]["Color"] = "#FF0000"

    def plotDrum(self, trace, filename='tmp.png'):
        print(trace.get_id())
        try:
            trace.data = trace.data * 1000 / 3.650539e+08  # counts -> mm/s (assumed overall sensitivity)

            im = trace.plot(type='dayplot',
                            dpi=dpi,
                            x_labels_size=int(8 * 100 / int(dpi)),
                            y_labels_size=int(8 * 100 / int(dpi)),
                            title_size=int(1000 / int(dpi)),
                            title=self._tEnd.strftime("%Y/%m/%d %H:%M:%S"),
                            size=(sizex, sizey),
                            color=('#AF0000', '#00AF00', '#0000AF'),
                            vertical_scaling_range=yRange,
                            handle=True,
                            time_offset=-3,
                            data_unit='mm/s'
                            )
            if not os.path.exists(os.path.dirname(filename)):
                os.makedirs(os.path.dirname(filename))
            im.savefig(filename)
            plt.close(im)

            return True
        except:
            print('oops, something went wrong while plotting!')
            return False

    def realTimeDrumPlot(self):
        print('start ' + UTCDateTime.now().strftime("%Y%m%d %H%M%S"))
        appTrace = Stream()
        self._rtRunning = True
        for tr in self._traces:
            id = tr.get_id()
            spl = id.split('.')
            network = spl[0]
            station = spl[1]
            channel = spl[3]
            l = int(self._tEnd - tr.stats['endtime'])
            self._status[station] = {}
            self._status[station]["Noise Level"] = "---"
            self._status[station]["Latency"] = str(l) + 's'
            self._status[station]["Voltage"] = "---"
            self._status[station]["Color"] = "#FF0000"

            for b in band:
                fileNameRT = 'RT_' + network + '_' + station + '_' + channel + '_' + str(b) + '.png'
                appTrace = tr.copy()
                bb = band[b]
                appTrace.trim(self._tEnd - self._rTWindow * 60, self._tEnd, pad=True, fill_value=0)
                appTrace.filter('bandpass', freqmin=bb[0], freqmax=bb[1], corners=2, zerophase=True)
                self.plotDrum(appTrace, basePath + 'RT/' + fileNameRT)

        with open(basePath + 'RT/geophone_network_status.json', 'w') as fp:
            json.dump(self._status, fp)
            fp.close()

        print('end ' + UTCDateTime.now().strftime("%Y%m%d %H%M%S"))
        self._rtRunning = False

    def hystDrumPlot(self):
        print('Hyststart ' + UTCDateTime.now().strftime("%Y%m%d %H%M%S"))
        appTrace = Stream()
        self._hyRunning = True

        for tr in self._traces:
            id = tr.get_id()
            # print('hyst '+id)
            spl = id.split('.')
            network = spl[0]
            station = spl[1]
            channel = spl[3]

            for h in hystType:

                if self._tEnd.hour % int(h / 60) == 0:
                    for b in band:
                        tStart = self._tEnd - h * 60
                        p = network + '/' + station + '/' + channel + '/' + str(self._tEnd.year) + '/' + str(
                            self._tEnd.month) + '/' + str(
                            self._tEnd.day) + '/' + str(h) + '/' + str(b)

                        fileName = p + '/' + tStart.strftime("%Y%m%d%H%M") + '_' + self._tEnd.strftime(
                            "%Y%m%d%H%M") + '.png'

                        appTrace = tr.copy()
                        bb = band[b]
                        appTrace.trim(tStart, self._tEnd, pad=True, fill_value=0)
                        appTrace.filter('bandpass', freqmin=bb[0], freqmax=bb[1], corners=2, zerophase=True)
                        self.plotDrum(appTrace, basePath + fileName)

        print('Hystend ' + UTCDateTime.now().strftime("%Y%m%d %H%M%S"))
        self._hyRunning = False

    def hystElab(self):

        for e in self._elabHyst:
            p = e.split('_')
            network = p[0]
            station = p[1]
            p = basePath + network + '/' + station + '/' + 'ELAB' + '/' + str(self._tEnd.year) + '/' + str(
                self._tEnd.month) + '/' + str(
                self._tEnd.day) + '/ELAB_' + e + '.json'
            if not os.path.exists(os.path.dirname(p)):
                os.makedirs(os.path.dirname(p))
            el = self._elabHyst[e]
            with open(p, 'w') as fp:
                json.dump(el, fp)
                fp.close()
            self._elabHyst[e]={}

    def elab(self):
        print('tremorStart ' + UTCDateTime.now().strftime("%Y%m%d %H%M%S"))
        s = np.asarray(self.get_all_nslc())
        appTrace = Stream()
        stTrace = Stream()
        self._elRunning = True
        for network in np.unique(s[:, 0]):
            for station in np.unique(s[:, 1]):

                stTrace = self._traces.select(network, station)
                elab = {'ts': int(self._tEnd.strftime("%Y%m%d%H%M%S"))}
                # TREMOR
                for tr in stTrace:
                    rms = {}
                    id = tr.get_id()
                    spl = id.split('.')
                    channel = spl[3]
                    elab[channel] = {}
                    tStart = self._tEnd - 60
                    appTrace = tr.copy()
                    appTrace.trim(tStart, self._tEnd)
                    appTrace.remove_response(self._inv)

                    for b in band:
                        bb = band[b]
                        trF = appTrace.copy()
                        trF.filter('bandpass', freqmin=bb[0], freqmax=bb[1], corners=2, zerophase=True)
                        rms[b] = np.sqrt(np.mean(trF.data ** 2))
                        # print(id+' '+str(b)+' '+str(rms))
                        elab[channel]['rms_' + b] = rms[b]
                nTr = network + '_' + station
                try:
                    self._elab[nTr][elab['ts']] = elab
                    self._elabHyst[nTr][elab['ts']] = elab
                except KeyError:
                    self._elab[nTr] = {}
                    self._elab[nTr][elab['ts']] = elab
                    self._elabHyst[nTr] = {}
                    self._elabHyst[nTr][elab['ts']] = elab

                # clean up and save: keep only the last 24 hours in memory
                m = int((self._tEnd - 1440 * 60).strftime("%Y%m%d%H%M%S"))
                mm = np.min(list(self._elab[nTr].keys()))
                if mm < m:
                    self._elab[nTr].pop(mm)
                for e in self._elab:
                    filename = basePath + 'RT/ELAB_' + e + '.json'

                    with open(filename, 'w') as fp:
                        json.dump(list(self._elab[e].values()), fp)
                        fp.close()
        #np.savez('elSave',h=self._elabHyst,e=self._elab)
        self._elRunning = False

    def run(self, network, station, channel):
        try:
            data = np.load('elSave.npz')  # restore saved state if present
        except OSError:
            pass

        while True:
            time.sleep(5)
            self._tNow = UTCDateTime.now()
            print(self._tNow)
            if self._tNow.second < self._lastData.second:
                self._tEnd = self._tNow
                self._traces = self.get_waveforms(network, station, '', channel, self._tEnd - 720 * 60,
                                                  UTCDateTime.now())
                print(self._traces)

                if not self._elRunning:
                    elThread = Thread(target=self.elab)
                    elThread.start()


                if self._tNow.hour < self._lastData.hour:
                    elSave = Thread(target=self.hystElab)  # pass the method itself; do not call it here
                    elSave.start()

                if (self._tNow.minute % self._rtSft == 0) & (self._lastData.minute % self._rtSft != 0):
                    if not self._rtRunning:
                        rtThread = Thread(target=self.realTimeDrumPlot)
                        rtThread.start()

                if (self._tEnd.minute == 0) & (self._lastData.minute != 0):
                    if not self._hyRunning:
                        hyThread = Thread(target=self.hystDrumPlot)
                        hyThread.start()

            self._lastData = self._tNow

    def on_data(self, traces):

        self._tNow = UTCDateTime.now()
        print(self._tNow)

        # traces.remove_response(self._inv)
        # self._traces += traces
        # self._traces.merge(fill_value=0)
        # if (self._tEnd.minute != self._lastData.minute):
        #     if not self._trRunning:
        #         trThread = Thread(target=self.tremor)
        #         trThread.start()

        if (self._tNow.minute % self._rtSft == 0) & (self._lastData.minute % self._rtSft != 0):
            self._tEnd = self._tNow
            self._traces.trim(self._tEnd - 720 * 60, self._tNow)
            print(self._traces)
            if not self._rtRunning:
                rtThread = Thread(target=self.realTimeDrumPlot)
                rtThread.start()
            if not self._saving:
                sThread = Thread(target=self.save)
                sThread.start()

        if (self._tEnd.minute == 0) & (self._lastData.minute != 0):
            self._tEnd = self._tNow

            if not self._hyRunning:
                hyThread = Thread(target=self.hystDrumPlot)
                hyThread.start()
        #     # self.hystDrumPlot()

        self._lastData = self._tNow

    def expt(self,start,end,st,ch):

        tr = client.get_waveforms('LK', st, '', ch, UTCDateTime.strptime(start,"%Y%m%dT%H%M%S"), UTCDateTime.strptime(end,"%Y%m%dT%H%M%S"))
        tr.remove_response(self._inv)
        tr.write('../../../../mnt/ide/traces.mseed')
Example #17
        sts = st.select(channel=channels[i], location=locations[i])
    # Fix to remove overlaps, but not mask the data
    sts = sts.merge()
    sts = sts.split()
    sts.sort(keys=['starttime', 'endtime', 'channel'])

    
    print(sts)
    for j, tr in enumerate(sts):
        print("Working on trace {}".format(j))
        print(tr)
        length = tr.stats['endtime'] - tr.stats['starttime']
        cumlen = cumlen + length
        nevents_tr = nevents*length/secyear
        ppsd = PPSD(tr.stats, metadata=inv, ppsd_length=200.0)
        ppsd.add(Stream(tr))
        psdmean = 0
        for period in psdperiodrange:
            psds = ppsd.extract_psd_values(period)[0]
            psdmean = psdmean + math.pow(10.0, 0.05*np.mean(psds))
        psdamp = psdmean/len(psdperiodrange)    
        threshold = psdamp*snr
        print("{} Threshold: {}".format(j,threshold))
        nev_tr = np.zeros_like(nevents)
        for k, mag in enumerate(magarray):
            idx = next((x for x, v in enumerate(amp_mag_dist[k][::-1])
                        if v>threshold), None)
            if idx is not None:
                idx = len(distarray)-idx-1
                nev_tr[:, :, k] = afrac[idx]*nevents_tr[:, :, k]
Example #18
class drumPlot(Client):

    _file = 'tr.mseed'  # 'traces.mseed'
    _traces = Stream()
    _inv = read_inventory("metadata/Braskem_metadata.xml")
    _rtSft = rtSft
    _lastData = UTCDateTime.now()
    _2minRTraces = Stream()
    _appTrace = Stream()
    _drTrace = Stream()
    _drHTrace = Stream()
    _rTWindow = rTWindow
    _tEnd = UTCDateTime.now()
    _tNow = UTCDateTime.now()
    _rtRunning = False
    _hyRunning = False
    _saving = False
    _elRunning = False
    _status = {}
    _elab = {}
    _elabHyst = {}
    _events = []
    _alertTable = ''
    _polAn = {
        'polWinLen': 5,
        'polWinFr': .1,
        'fLow': 4,
        'fHigh': 12,
        'plTh': 0.000005  #0.8
    }
    _amplAn = {
        'lowFW': [1, 20],
        'highFW': [20, 50],
        'lowFTh': 0.00001,
        'highFTh': 0.00005
    }

    _polAnResult = []

    def plotDrum(self, trace, filename='tmp.png'):
        #print(trace.get_id())
        try:
            trace.data = trace.data * 1000 / 3.650539e+08  # counts -> mm/s (assumed overall sensitivity)
            #im,ax=plt.subplots()
            if not os.path.exists(os.path.dirname(filename)):
                os.makedirs(os.path.dirname(filename))

            #im=
            trace.plot(
                type='dayplot',
                dpi=dpi,
                x_labels_size=int(8 * 100 / int(dpi)),
                y_labels_size=int(8 * 100 / int(dpi)),
                title_size=int(1000 / int(dpi)),
                title=self._tEnd.strftime("%Y/%m/%d %H:%M:%S"),
                size=(sizex, sizey),
                color=('#AF0000', '#00AF00', '#0000AF'),
                vertical_scaling_range=yRange,
                outfile=filename,
                #handle=True,
                time_offset=-3,
                data_unit='mm/s',
                events=self._events)
            #            im.savefig(filename)
            #            plt.close(im)

            return True
        except:
            print('oops, something went wrong while plotting!')
            return False

    def realTimeDrumPlot(self):
        print('RealTime plot start ' +
              UTCDateTime.now().strftime("%Y%m%d %H%M%S"))
        appTrace = Stream()
        self._rtRunning = True
        for tr in self._traces:
            id = tr.get_id()
            spl = id.split('.')
            network = spl[0]
            station = spl[1]
            channel = spl[3]
            l = int(self._tEnd - tr.stats['endtime'])
            self._status[station] = {}
            self._status[station]["Noise Level"] = "---"
            self._status[station]["Latency"] = str(l) + 's'
            self._status[station]["Voltage"] = "---"
            self._status[station]["Color"] = "#FF0000"

            for b in band:
                fileNameRT = 'RT_' + network + '_' + station + '_' + channel + '_' + str(
                    b) + '.png'
                appTrace = tr.copy()
                bb = band[b]
                appTrace.trim(self._tEnd - self._rTWindow * 60,
                              self._tEnd,
                              pad=True,
                              fill_value=0)
                appTrace.filter('bandpass',
                                freqmin=bb[0],
                                freqmax=bb[1],
                                corners=2,
                                zerophase=True)
                self.plotDrum(appTrace, self._basePathRT + 'RT/' + fileNameRT)

        with open(self._basePathRT + 'RT/geophone_network_status.json',
                  'w') as fp:
            json.dump(self._status, fp)
            fp.close()

        print('realTime end ' + UTCDateTime.now().strftime("%Y%m%d %H%M%S"))
        self._rtRunning = False

    def hystDrumPlot(self, tEnd=0):

        appTrace = Stream()
        self._hyRunning = True
        if tEnd == 0:
            tEnd = self._tEnd
        else:
            self._tEnd = tEnd
        print('Hyststart ' + tEnd.strftime("%Y%m%d %H%M%S"))
        for tr in self._traces:
            id = tr.get_id()
            # print('hyst '+id)
            spl = id.split('.')
            network = spl[0]
            station = spl[1]
            channel = spl[3]

            for h in hystType:

                if tEnd.hour % int(h / 60) == 0:
                    for b in band:
                        tStart = tEnd - h * 60
                        p = network + '/' + station + '/' + channel + '/' + str(
                            tStart.year) + '/' + str(tStart.month) + '/' + str(
                                tStart.day) + '/' + str(h) + '/' + str(b)

                        fileName = p + '/' + tStart.strftime(
                            "%Y%m%d%H"
                        ) + '00.png'  # + '_' + (self._tEnd-60).strftime(
                        #"%Y%m%d%H") + '.png'

                        appTrace = tr.copy()
                        bb = band[b]
                        appTrace.trim(tStart, tEnd, pad=True, fill_value=0)
                        appTrace.filter('bandpass',
                                        freqmin=bb[0],
                                        freqmax=bb[1],
                                        corners=2,
                                        zerophase=True)
                        self.plotDrum(appTrace, self._basePath + fileName)

        self._hyRunning = False

    def hystElab(self):
        tStart = self._tEnd - 1440 * 60
        for e in self._elabHyst:
            p = e.split('_')
            network = p[0]
            station = p[1]
            p = self._basePath + network + '/' + station + '/' + 'ELAB' + '/' + str(
                tStart.year) + '/' + str(tStart.month) + '/' + str(
                    tStart.day) + '/' + tStart.strftime(
                        "%Y%m%d%H") + '00.json'  #ELAB_' + e + '.json'
            if not os.path.exists(os.path.dirname(p)):
                os.makedirs(os.path.dirname(p))
            # el = self._elabHyst[e]
            with open(p, 'w') as fp:
                json.dump(list(self._elabHyst[e].values()), fp)
                fp.close()
            self._elabHyst[e] = {}

    def elab(self):
        self._elRunning = True

        tStart = self._tEnd - 60
        s = np.asarray(self.get_all_nslc())

        intTrace = self._2minRTraces.copy()
        intTrace.trim(tStart, self._tEnd)

        for network in np.unique(s[:, 0]):
            for station in np.unique(s[:, 1]):
                print('elab ' + station)
                stTrace = intTrace.select(network, station)
                elab = {'ts': int(self._tEnd.strftime("%Y%m%d%H%M%S"))}
                # TREMOR
                nTr = network + '_' + station
                # f = self.elabWhere(nTr, (self._tEnd - 3600).strftime("%Y%m%d%H%M%S"),
                #                    self._tEnd.strftime("%Y%m%d%H%M%S"))
                for appTrace in stTrace:
                    rms = {}
                    id = appTrace.get_id()
                    spl = id.split('.')
                    channel = spl[3]
                    elab[channel] = {}
                    # tStart = self._tEnd - 60
                    # appTrace = tr.copy()
                    # appTrace.trim(tStart, self._tEnd)
                    # appTrace.remove_response(self._inv)

                    for b in band:
                        bb = band[b]
                        trF = appTrace.copy()
                        trF.filter('bandpass',
                                   freqmin=bb[0],
                                   freqmax=bb[1],
                                   corners=2,
                                   zerophase=True)
                        rms[b] = np.sqrt(np.mean(trF.data**2))
                        elab[channel]['rms_' + b] = str("%0.2e" % rms[b])
                        # HC_rms = np.sum([float(s[channel]['rms_' + b]) for s in f])
                        # elab[channel]['HC_rms_' + b] = str("%0.2e" % HC_rms)

                try:
                    self._elab[nTr][elab['ts']] = elab
                    self._elabHyst[nTr][elab['ts']] = elab
                except KeyError:
                    self._elab[nTr] = {}
                    self._elab[nTr][elab['ts']] = elab
                    self._elabHyst[nTr] = {}
                    self._elabHyst[nTr][elab['ts']] = elab

                # clean up and save: keep only the last 24 hours in memory
                m = int((self._tEnd - 1440 * 60).strftime("%Y%m%d%H%M%S"))
                mm = np.min(list(self._elab[nTr].keys()))
                if mm < m:
                    self._elab[nTr].pop(mm)
                for e in self._elab:
                    filename = self._basePathRT + 'RT/ELAB_' + e + '.json'

                    with open(filename, 'w') as fp:
                        json.dump(list(self._elab[e].values()), fp)
                        fp.close()

                # except:
                #     print('failed elab in '+station)
                #     pass

        np.savez(self._basePath + 'elSave', h=self._elabHyst, e=self._elab)
        self._elRunning = False

    def elabWhere(self, id, ts, te):
        r = []
        ts = int(ts)
        te = int(te)
        try:
            for x in (y for y in self._elab[id].keys() if (y > ts) & (y < te)):
                r.append(self._elab[id][x])
        except KeyError:
            pass
        return r

    def An(self, table='seismic.alerts'):

        appTrace = self._2minRTraces.copy()
        ts = self._tEnd - 70
        te = self._tEnd - 10

        appTraceLow = self._2minRTraces.copy()
        appTraceLow.filter('bandpass',
                           freqmin=self._amplAn['lowFW'][0],
                           freqmax=self._amplAn['lowFW'][1],
                           corners=3,
                           zerophase=True)
        appTraceLow.trim(ts, te)

        appTraceHigh = self._2minRTraces.copy()
        appTraceHigh.filter('bandpass',
                            freqmin=self._amplAn['highFW'][0],
                            freqmax=self._amplAn['highFW'][1],
                            corners=3,
                            zerophase=True)
        appTraceHigh.trim(ts, te)
        # appTrace.filter('bandpass', freqmin=self._polAn['fLow'], freqmax=self._polAn['fHigh'], corners=3, zerophase=True)
        # appTrace.trim(ts,te)
        s = np.asarray(self.get_all_nslc())
        for network in np.unique(s[:, 0]):
            for station in np.unique(s[:, 1]):
                nTr = network + '_' + station
                try:
                    print('amplitude analysis ' + station)
                    runPolAn = False
                    stTrace = appTraceLow.select(network, station)
                    envL = [
                        obspy.signal.filter.envelope(st.data) for st in stTrace
                    ]
                    if np.max([np.max(e)
                               for e in envL]) > self._amplAn['lowFTh']:
                        a = alert(table)
                        a._a['utc_time'] = "'" + UTCDateTime(te).strftime(
                            "%Y-%m-%d %H:%M:%S") + "'"
                        a._a['utc_time_str'] = "'" + UTCDateTime(te).strftime(
                            "%Y-%m-%d %H:%M:%S") + "'"
                        a._a['event_type'] = "'AML'"
                        a._a['station'] = "'" + nTr + "'"
                        a._a['amplitude_ehe'] = np.max(envL[0])
                        a._a['amplitude_ehn'] = np.max(envL[1])
                        a._a['amplitude_ehz'] = np.max(envL[2])
                        a.insert()
                        runPolAn = True

                    stTrace = appTraceHigh.select(network, station)
                    envH = [
                        obspy.signal.filter.envelope(st.data) for st in stTrace
                    ]
                    if np.max([np.max(e)
                               for e in envH]) > self._amplAn['highFTh']:
                        a = alert(table)
                        a._a['utc_time'] = "'" + UTCDateTime(te).strftime(
                            "%Y-%m-%d %H:%M:%S") + "'"
                        a._a['utc_time_str'] = "'" + UTCDateTime(te).strftime(
                            "%Y-%m-%d %H:%M:%S") + "'"
                        a._a['event_type'] = "'AMH'"
                        a._a['station'] = "'" + nTr + "'"
                        a._a['amplitude_ehe'] = np.max(envH[0])
                        a._a['amplitude_ehn'] = np.max(envH[1])
                        a._a['amplitude_ehz'] = np.max(envH[2])
                        a.insert()
                        runPolAn = False

                    # try:
                    #     print('polarization analysis ' + station)
                    #     stTrace = appTrace.select(network, station)
                    #     pAM = multiprocessing.Process(target=self.polAn, args=(stTrace,ts,te,nTr,envL,table,))
                    #     pAM.start()
                    #             #self.polAn(stTrace,ts,te)
                    # except:
                    #     print('polarization analysis ' + station + ' failed')
                    # pass
                except:
                    print('amplitude analysis ' + station + ' failed')

    def polAn(self, stTrace, ts, te, nTr, env, table):

        u = obspy.signal.polarization.polarization_analysis(
            stTrace, self._polAn['polWinLen'], self._polAn['polWinFr'],
            self._polAn['fLow'], self._polAn['fHigh'], ts, te, False, 'pm',
            self._polAn['plTh']**2)

        x = np.where(u['azimuth_error'] > 0.01)  # threshold; cf. self._polAn['plTh']
        a = alert(table)

        for xx in x[0]:
            a._a['utc_time'] = "'" + UTCDateTime(
                u['timestamp'][xx]).strftime("%Y-%m-%d %H:%M:%S") + "'"
            a._a['utc_time_str'] = "'" + UTCDateTime(
                u['timestamp'][xx]).strftime("%Y-%m-%d %H:%M:%S") + "'"
            a._a['event_type'] = "'PL'"
            a._a['station'] = "'" + nTr + "'"
            #a._a['linearity'] = u['planarity'][xx]
            a._a['az'] = u['azimuth'][xx]
            a._a['tkoff'] = u['incidence'][xx]
            y = np.where(
                (u['timestamp'][xx] -
                 self._polAn['polWinLen'] < stTrace[0].times('timestamp'))
                & (stTrace[0].times('timestamp') <= u['timestamp'][xx]))
            a._a['amplitude_ehe'] = np.max(env[0][y])
            a._a['amplitude_ehn'] = np.max(env[1][y])
            a._a['amplitude_ehz'] = np.max(env[2][y])
            # print(a._a)

            a.insert()

    def run(self, network, station, channel, rt=True):
        logging.basicConfig(filename='log.log',
                            level='WARNING',
                            format='%(asctime)s %(message)s')
        tStart = UTCDateTime()
        try:
            with open("last.txt", "r") as fileLast:
                tStart = UTCDateTime.strptime(fileLast.read().rstrip("\n"),
                                              "%Y-%m-%d %H:%M:%S")
        except IOError:
            pass
        # r=False
        #
        # try:
        #
        #     data=np.load(self._basePath+'elSave.npz')
        #     self._elab=data['e'].tolist()
        #     self._elabHyst=data['h'].tolist()
        # except:
        #
        #     pass

        self._stationData = {
            'BRK0': self._inv.get_coordinates('LK.BRK0..EHZ'),
            'BRK1': self._inv.get_coordinates('LK.BRK1..EHZ'),
            'BRK2': self._inv.get_coordinates('LK.BRK2..EHZ'),
            'BRK3': self._inv.get_coordinates('LK.BRK3..EHZ'),
            'BRK4': self._inv.get_coordinates('LK.BRK4..EHZ'),
        }

        with open(self._basePathRT + 'elab_status.json', 'w') as fp:
            json.dump(self._stationData, fp)
            fp.close()

        self._tNow = tStart
        while True:

            if self._tNow > UTCDateTime.now() - 5:
                time.sleep(5)
                self._tNow = UTCDateTime.now()
                rt = True
                print(self._tNow)
            else:
                self._tNow += 10
                print(self._tNow)
                rt = False
                # if (not rt) & (not self._rtRunning)&(not self._hyRunning)&( not self._saving)& (not self._elRunning):
                #
                #     print('sk')
                #     print(self._tNow)

            if self._tNow.second < self._lastData.second:
                self._tEnd = self._tNow

                print('getting traces')
                try:
                    self._traces = self.get_waveforms(network, station, '',
                                                      channel,
                                                      self._tEnd - 720 * 60,
                                                      self._tEnd)
                    self._traces.merge(fill_value=0)
                except:
                    print('failed to get traces')

                if (self._tNow.minute % self._rtSft == 0) & (
                        self._lastData.minute % self._rtSft != 0) & rt:
                    print('getting events')
                    try:
                        self.getCasp()
                    except:
                        print('events failed')
                        pass

                    try:
                        self.pushEv()
                    except:
                        print('push events failed')
                        pass

                    if (not self._rtRunning) & rt:
                        pRt = multiprocessing.Process(
                            target=self.realTimeDrumPlot)
                        pRt.start()

                if (not self._elRunning):
                    self._2minRTraces = self._traces.copy()
                    self._2minRTraces.trim(self._tEnd - 120, self._tEnd)
                    self._2minRTraces.remove_response(self._inv)
                    self._2minRTraces.sort()

                    # if rt:
                    #     self.elab()
                    self.An(self._alertTable)

                if (self._tEnd.minute == 0) & (self._lastData.minute != 0):
                    if not self._hyRunning:
                        pHy = multiprocessing.Process(target=self.hystDrumPlot)
                        pHy.start()

                if self._tNow.hour < self._lastData.hour:
                    self.hystElab()

            self._lastData = self._tNow
            fileLast = open("last.txt", "w")
            fileLast.write(
                UTCDateTime(self._lastData).strftime("%Y-%m-%d %H:%M:%S"))
            fileLast.close()

    def getCasp(self):
        connection = psycopg2.connect(host='172.16.8.10',
                                      port='5432',
                                      database='casp_events',
                                      user='******',
                                      password='******')
        sql = 'SELECT event_id, t0, lat, lon, dpt, magWA FROM auto_eventi'
        cursor = connection.cursor()
        cursor.execute(sql)
        p = cursor.fetchall()
        self._events = []
        for pp in p:
            e = {
                'id': pp[0],
                'time': UTCDateTime(pp[1]),
                'text': 'CASP ev. mag' + str(pp[5]),
                'lat': float(pp[2]),
                'lon': float(pp[3]),
                'dpt': float(pp[4]),
                'mag': float(pp[5])
            }
            self._events.append(e)

    def pushEv(self):
        connection = psycopg2.connect(host='80.211.98.179',
                                      port='5432',
                                      user='******',
                                      password='******')
        for e in self._events:

            sql = 'INSERT INTO seismic.events_casp (geom,lat,lon,utc_time,utc_time_str,magnitudo,depth,id_casp) ' \
                  "VALUES (ST_GeomFromText('POINT(" + str(e['lon']) + ' ' + str(e['lat']) + ")', 4326),"\
                  + str(e['lat']) + ','+ str(e['lon'])+ ",'"+  str(UTCDateTime(e['time']).strftime("%Y-%m-%d %H:%M:%S"))+ "','"+  str(UTCDateTime(e['time']).strftime("%Y-%m-%d %H:%M:%S"))+"',"+str(e['mag'])+','+ str(e['dpt'])  +','+e['id']+") ON CONFLICT DO NOTHING;"
            connection.cursor().execute(sql)
            connection.commit()

    def pushIntEv(self, e, table='seismic.events_swarm', id='id_swarm'):
        connection = psycopg2.connect(host='80.211.98.179',
                                      port='5432',
                                      user='******',
                                      password='******')
        #for e in events:

        sql = 'INSERT INTO '+table+ ' (geom,note,lat,lon,utc_time,utc_time_str,magnitudo,depth,'+id+') ' \
              "VALUES (ST_GeomFromText('POINT(" + str(e['lon']) + ' ' + str(e['lat']) + ")', 4326)" \
              + ",'" + e['note'] + "'," + str(e['lat']) + ',' + str(e['lon']) + ",'" + str(
            UTCDateTime(e['time']).strftime("%Y-%m-%d %H:%M:%S")) + "','" + str(
            UTCDateTime(e['time']).strftime("%Y-%m-%d %H:%M:%S")) + "'," + str(e['mag']) + ',' + str(e['dpt']) + ",'" + \
              e['id'] + "') ON CONFLICT DO NOTHING;"
        connection.cursor().execute(sql)
        connection.commit()
Example #19
 def get_preview(self,
                 trace_ids=None,
                 starttime=None,
                 endtime=None,
                 network=None,
                 station=None,
                 location=None,
                 channel=None,
                 pad=False):
     """
     Returns the preview trace.
     """
     # build up query
     session = self.session()
     query = session.query(WaveformChannel)
     # start and end time
     try:
         starttime = UTCDateTime(starttime)
     except Exception:
         starttime = UTCDateTime() - 60 * 20
     finally:
         query = query.filter(WaveformChannel.endtime > starttime.datetime)
     try:
         endtime = UTCDateTime(endtime)
     except Exception:
         # default to the current time
         endtime = UTCDateTime()
     finally:
         query = query.filter(WaveformChannel.starttime < endtime.datetime)
     # process arguments
     if trace_ids:
         # filter over trace id list
         trace_filter = or_()
         for trace_id in trace_ids:
             temp = trace_id.split('.')
             if len(temp) != 4:
                 continue
             trace_filter.append(
                 and_(WaveformChannel.network == temp[0],
                      WaveformChannel.station == temp[1],
                      WaveformChannel.location == temp[2],
                      WaveformChannel.channel == temp[3]))
         if trace_filter.clauses:
             query = query.filter(trace_filter)
     else:
         # filter over network/station/location/channel id
         kwargs = {
             'network': network,
             'station': station,
             'location': location,
             'channel': channel
         }
         for key, value in kwargs.items():
             if value is None:
                 continue
             col = getattr(WaveformChannel, key)
             if '*' in value or '?' in value:
                 value = value.replace('?', '_')
                 value = value.replace('*', '%')
                 query = query.filter(col.like(value))
             else:
                 query = query.filter(col == value)
     # execute query
     results = query.all()
     session.close()
     # create Stream
     st = Stream()
     for result in results:
         preview = result.get_preview()
         st.append(preview)
     # merge and trim
     st = merge_previews(st)
     st.trim(starttime, endtime, pad=pad)
     return st
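
 # A hypothetical usage sketch (the enclosing instance is assumed to be
 # available as `client`; the trace id and times are illustrative):
 # st = client.get_preview(trace_ids=['BW.RJOB..EHZ'],
 #                         starttime=UTCDateTime() - 3600,
 #                         endtime=UTCDateTime())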
Example #20
def merge_previews(stream):
    """
    Merges all preview traces in one Stream object. Does not change the
    original stream because the data needs to be copied anyway.

    :type stream: :class:`~obspy.core.stream.Stream`
    :param stream: Stream object to be merged
    :rtype: :class:`~obspy.core.stream.Stream`
    :return: Merged Stream object.
    """
    copied_traces = copy(stream.traces)
    stream.sort()
    # Group traces by id.
    traces = {}
    dtypes = []
    for trace in stream:
        # Throw away empty traces.
        if trace.stats.npts == 0:
            continue
        if not hasattr(trace.stats, 'preview') or not trace.stats.preview:
            msg = 'Trace\n%s\n is not a preview trace.' % str(trace)
            raise Exception(msg)
        traces.setdefault(trace.id, [])
        traces[trace.id].append(trace)
        dtypes.append(trace.data.dtype)
    if len(traces) == 0:
        return Stream()
    # Initialize new Stream object.
    new_stream = Stream()
    for value in traces.values():
        if len(value) == 1:
            new_stream.append(value[0])
            continue
        # All traces need to have the same delta value and also be on the same
        # grid spacing. It is enough to only check the sampling rate because
        # the algorithm that creates the preview assures that the grid spacing
        # is correct.
        sampling_rates = {tr.stats.sampling_rate for tr in value}
        if len(sampling_rates) != 1:
            msg = 'More than one sampling rate for traces with id %s.' % \
                  value[0].id
            raise Exception(msg)
        delta = value[0].stats.delta
        # Check dtype.
        dtypes = {native_str(tr.data.dtype) for tr in value}
        if len(dtypes) > 1:
            msg = 'Different dtypes for traces with id %s' % value[0].id
            raise Exception(msg)
        dtype = dtypes.pop()
        # Get the minimum start and maximum end time for all traces.
        min_starttime = min([tr.stats.starttime for tr in value])
        max_endtime = max([tr.stats.endtime for tr in value])
        samples = int(round((max_endtime - min_starttime) / delta)) + 1
        data = np.empty(samples, dtype=dtype)
        # Fill with negative one values which corresponds to a gap.
        data[:] = -1
        # Create trace and give starttime.
        new_trace = Trace(data=data, header=value[0].stats)
        # Loop over all traces in value and add to data.
        for trace in value:
            start_index = int((trace.stats.starttime - min_starttime) / delta)
            end_index = start_index + len(trace.data)
            # Element-by-element comparison.
            data[start_index:end_index] = \
                np.maximum(data[start_index:end_index], trace.data)
        # set npts again, because data is changed in place
        new_trace.stats.npts = len(data)
        new_stream.append(new_trace)
    stream.traces = copied_traces
    return new_stream
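
# A minimal usage sketch (assumed setup): two 1 Hz preview traces with the
# same id, offset by 10 s, merged into a single trace in which overlapping
# samples keep the element-wise maximum and gaps are filled with -1.
import numpy as np
from obspy import Trace, Stream, UTCDateTime

tr1 = Trace(data=np.arange(20, dtype=np.float32))
tr1.stats.preview = True
tr2 = Trace(data=np.arange(20, dtype=np.float32))
tr2.stats.preview = True
tr2.stats.starttime = UTCDateTime(10)

merged = merge_previews(Stream(traces=[tr1, tr2]))
print(merged)  # one 30-sample trace covering both inputs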
Example #21
    def section_plot(self,
                     assoc_id,
                     files,
                     seconds_ahead=5,
                     record_length=100,
                     channel='Z'):

        station = self.assoc_db.query(
            Candidate.sta).filter(Candidate.assoc_id == assoc_id).all()
        sta_list = []
        for sta, in station:
            sta_list.append(str(sta))
        station_single = self.assoc_db.query(Pick.sta).filter(
            Pick.assoc_id == assoc_id).filter(Pick.locate_flag == None).all()
        for sta, in station_single:
            sta_list.append(str(sta))
        #print sta_list

        eve = self.assoc_db.query(Associated).filter(
            Associated.id == assoc_id).first()
        # Earthquakes' epicenter
        eq_lat = eve.latitude
        eq_lon = eve.longitude

        # Reading the waveforms
        ST = Stream()
        for file in files:
            st = read(file)
            ST += st

        # Some seismometers use channel codes like BH1, BH2 or BH3; reassign
        # the channel code accordingly:
        if channel == 'E' or channel == 'e':
            Chan = 'E1'
        elif channel == 'N' or channel == 'n':
            Chan = 'N2'
        elif channel == 'Z' or channel == 'z':
            Chan = 'Z3'
        else:
            print(
                'Please input component E, e, N, n, Z, or z, the default is Z')

        # Calculating distance from headers lat/lon
        ST_new = Stream()
        for tr in ST:
            if tr.stats.channel[2] in Chan and tr.stats.station in sta_list:
                if tr.stats.starttime.datetime < eve.ot and tr.stats.endtime.datetime > eve.ot:
                    tr.trim(
                        UTCDateTime(eve.ot - timedelta(seconds=seconds_ahead)),
                        UTCDateTime(eve.ot + timedelta(seconds=record_length)))
                    ST_new += tr

        # If a station appears more than once, keep only its longest trace
        while True:
            ST_new_sta = []
            for tr in ST_new:
                ST_new_sta.append(tr.stats.station)
            duplicate = list(
                set([tr for tr in ST_new_sta if ST_new_sta.count(tr) > 1]))
            if not duplicate:
                break
            index = [
                i for (i, j) in enumerate(ST_new_sta) if j == duplicate[-1]
            ]
            # drop the shorter of the first two duplicate traces and re-check
            if ST_new[index[0]].stats.npts < ST_new[index[1]].stats.npts:
                del ST_new[index[0]]
            else:
                del ST_new[index[1]]

        ST_new.detrend('demean')
        #     ST_new.filter('bandpass', freqmin=0.1, freqmax=100)

        factor = 10
        numRows = len(ST_new)
        segs = []
        ticklocs = []
        sta = []
        circle_x = []
        circle_y = []
        segs_picks = []
        ticklocs_picks = []
        for tr in ST_new:
            dmax = tr.data.max()
            dmin = tr.data.min()
            data = tr.data / (dmax - dmin) * factor
            # rounding the sample count avoids floating-point length errors
            # that "np.arange(0, npts/sampling_rate, delta)" would produce
            t = np.arange(
                0,
                round(tr.stats.npts / tr.stats.sampling_rate / tr.stats.delta)
            ) * tr.stats.delta
            segs.append(np.hstack((data[:, np.newaxis], t[:, np.newaxis])))
            lon, lat = self.tt_stations_db_3D.query(
                Station3D.longitude, Station3D.latitude).filter(
                    Station3D.sta == tr.stats.station).first()
            # gps2dist_azimuth returns meters; convert to km
            distance = int(
                gps2dist_azimuth(lat, lon, eq_lat, eq_lon)[0] / 1000.)
            ticklocs.append(distance)
            sta.append(tr.stats.station)
            # Dot-plot the picks. For vertical traces, P is queried from the
            # Pick table and S from PickModified; for horizontal traces both
            # come from PickModified.
            if Chan == 'Z3':
                picks_p = self.assoc_db.query(
                    Pick.time).filter(Pick.assoc_id == assoc_id).filter(
                        Pick.sta == tr.stats.station).filter(
                            Pick.chan == tr.stats.channel).filter(
                                Pick.phase == 'P').all()
                if not picks_p:
                    picks_p = self.assoc_db.query(PickModified.time).filter(
                        PickModified.assoc_id == assoc_id).filter(
                            PickModified.sta == tr.stats.station).filter(
                                PickModified.phase == 'P').all()
                picks_s = self.assoc_db.query(PickModified.time).filter(
                    PickModified.assoc_id == assoc_id).filter(
                        PickModified.sta == tr.stats.station).filter(
                            PickModified.phase == 'S').all()
            else:
                picks_p = self.assoc_db.query(PickModified.time).filter(
                    PickModified.assoc_id == assoc_id).filter(
                        PickModified.sta == tr.stats.station).filter(
                            PickModified.phase == 'P').all()
                picks_s = self.assoc_db.query(PickModified.time).filter(
                    PickModified.assoc_id == assoc_id).filter(
                        PickModified.sta == tr.stats.station).filter(
                            PickModified.phase == 'S').all()


            picks = picks_p + picks_s
            #       picks=self.assoc_db.query(PickModified.time).filter(PickModified.assoc_id==assoc_id).filter(PickModified.sta==tr.stats.station).all()
            for pick, in picks:
                index = int(
                    (pick - eve.ot +
                     timedelta(seconds=seconds_ahead)).total_seconds() /
                    tr.stats.delta)
                circle_x.append(distance + data[index])
                circle_y.append(t[index])
                # BAR plot where picks are picked
                t_picks = np.array([t[index], t[index]])
                data_picks = np.array([data.min(), data.max()])
                segs_picks.append(
                    np.hstack(
                        (data_picks[:, np.newaxis], t_picks[:, np.newaxis])))
                ticklocs_picks.append(distance)
        tick_max = max(ticklocs)
        tick_min = min(ticklocs)
        offsets = np.zeros((numRows, 2), dtype=float)
        offsets[:, 0] = ticklocs
        offsets_picks = np.zeros((len(segs_picks), 2), dtype=float)
        offsets_picks[:, 0] = ticklocs_picks

        lines = LineCollection(segs,
                               offsets=offsets,
                               transOffset=None,
                               linewidths=.25,
                               color='gray')
        lines_picks = LineCollection(segs_picks,
                                     offsets=offsets_picks,
                                     transOffset=None,
                                     linewidths=1,
                                     color='k')

        fig = plt.figure(figsize=(15, 8))
        ax1 = fig.add_subplot(111)
        # dots marking where the picks cross the waveforms
        ax1.plot(circle_x, circle_y, 'o', c='gray')
        x1 = tick_max + (tick_max - tick_min) * 0.1
        plt.ylim(0, record_length)
        plt.xlim(0, x1)
        ax1.add_collection(lines)
        ax1.add_collection(lines_picks)
        ax1.set_xticks(ticklocs)
        ax1.set_xticklabels(sta)
        ax1.invert_yaxis()
        ax1.xaxis.tick_top()
        plt.setp(plt.xticks()[1], rotation=45)
        plt.xlabel('channel: ' + channel, fontsize=18)
        plt.ylabel('Record Length (s)', fontsize=18)

        plt.show()
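
A hypothetical call of section_plot; the plotter instance, association id and file list are placeholders that depend on how the associator database was populated.

import glob

plotter.section_plot(assoc_id=1,
                     files=glob.glob('/data/event/*.mseed'),
                     seconds_ahead=5,
                     record_length=100,
                     channel='Z')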
Example #22
def coincidence_trigger(trigger_type,
                        thr_on,
                        thr_off,
                        stream,
                        thr_coincidence_sum,
                        trace_ids=None,
                        max_trigger_length=1e6,
                        delete_long_trigger=False,
                        trigger_off_extension=0,
                        details=False,
                        event_templates={},
                        similarity_threshold=0.7,
                        **options):
    """
    Perform a network coincidence trigger.
    The routine works in the following steps:
      * take every single trace in the stream
      * apply specified triggering routine (can be skipped to work on
        precomputed custom characteristic functions)
      * evaluate all single station triggering results
      * compile chronological overall list of all single station triggers
      * find overlapping single station triggers
      * calculate coincidence sum of every individual overlapping trigger
      * add to coincidence trigger list if it exceeds the given threshold
      * optional: if master event templates are provided, also check single
        station triggers individually and include any single station trigger if
        it exceeds the specified similarity threshold even if no other stations
        coincide with the trigger
      * return list of network coincidence triggers
    .. note::
        An example can be found in the
        `Trigger/Picker Tutorial
        <https://tutorial.obspy.org/code_snippets/trigger_tutorial.html>`_.
    .. note::
        If ``trigger_type`` is set to ``None``, precomputed characteristic
        functions can be provided.
    .. seealso:: [Withers1998]_ (p. 98) and [Trnkoczy2012]_
    :param trigger_type: String that specifies which trigger is applied (e.g.
        ``'recstalta'``). See e.g. :meth:`obspy.core.trace.Trace.trigger` for
        further details. If set to `None` no triggering routine is applied,
        i.e.  data in traces is supposed to be a precomputed characteristic
        function on which the trigger thresholds are evaluated.
    :type trigger_type: str or None
    :type thr_on: float
    :param thr_on: threshold for switching single station trigger on
    :type thr_off: float
    :param thr_off: threshold for switching single station trigger off
    :type stream: :class:`~obspy.core.stream.Stream`
    :param stream: Stream containing waveform data for all stations. These
        data are changed inplace, make a copy to keep the raw waveform data.
    :type thr_coincidence_sum: int or float
    :param thr_coincidence_sum: Threshold for coincidence sum. The network
        coincidence sum has to be at least equal to this value for a trigger to
        be included in the returned trigger list.
    :type trace_ids: list or dict, optional
    :param trace_ids: Trace IDs to be used in the network coincidence sum. A
        dictionary with trace IDs as keys and weights as values can
        be provided. If a list of trace IDs is provided, all
        weights are set to 1. The default of ``None`` uses all traces present
        in the provided stream. Waveform data with trace IDs not
        present in this list/dict are disregarded in the analysis.
    :type max_trigger_length: int or float
    :param max_trigger_length: Maximum single station trigger length (in
        seconds). ``delete_long_trigger`` controls what happens to single
        station triggers longer than this value.
    :type delete_long_trigger: bool, optional
    :param delete_long_trigger: If ``False`` (default), single station
        triggers are manually released at ``max_trigger_length``, although the
        characteristic function has not dropped below ``thr_off``. If set to
        ``True``, all single station triggers longer than
        ``max_trigger_length`` will be removed and are excluded from
        coincidence sum computation.
    :type trigger_off_extension: int or float, optional
    :param trigger_off_extension: Extends search window for next trigger
        on-time after last trigger off-time in coincidence sum computation.
    :type details: bool, optional
    :param details: If set to ``True`` the output coincidence triggers contain
        more detailed information: A list with the trace IDs (in addition to
        only the station names), as well as lists with single station
        characteristic function peak values and standard deviations in the
        triggering interval and mean values of both, relatively weighted like
        in the coincidence sum. These values can help to judge the reliability
        of the trigger.
    :param options: Necessary keyword arguments for the respective trigger
        that will be passed on. For example ``sta`` and ``lta`` for any STA/LTA
        variant (e.g. ``sta=3``, ``lta=10``).
        Arguments ``sta`` and ``lta`` (seconds) will be mapped to ``nsta``
        and ``nlta`` (samples) by multiplying with sampling rate of trace.
        (e.g. ``sta=3``, ``lta=10`` would call the trigger with 3 and 10
        seconds average, respectively)
    :param event_templates: Event templates to use in checking similarity of
        single station triggers against known events. Expected are streams with
        three traces for Z, N, E component. A dictionary is expected where for
        each station used in the trigger, a list of streams can be provided as
        the value to the network/station key (e.g. {"GR.FUR": [stream1,
        stream2]}). Templates are compared against the provided `stream`
        without the specified triggering routine (`trigger_type`) applied.
    :type event_templates: dict
    :param similarity_threshold: similarity threshold (0.0-1.0) at which a
        single station trigger gets included in the output network event
        trigger list. A common threshold can be set for all stations (float) or
        a dictionary mapping station names to float values for each station.
    :type similarity_threshold: float or dict
    :rtype: tuple
    :returns: Stream of the characteristic functions that were computed and
        the list of event triggers sorted chronologically.
    """
    st = stream.copy()
    # if no trace ids are specified use all traces ids found in stream
    if trace_ids is None:
        trace_ids = [tr.id for tr in st]
    # we always work with a dictionary with trace ids and their weights later
    if isinstance(trace_ids, (list, tuple)):
        trace_ids = dict.fromkeys(trace_ids, 1)
    # set up similarity thresholds as a dictionary if necessary
    if not isinstance(similarity_threshold, dict):
        similarity_threshold = dict.fromkeys([tr.stats.station for tr in st],
                                             similarity_threshold)

    # the single station triggering
    triggers = []  # eventually becomes detections coincident on multiple stations
    cfts = Stream()
    # prepare kwargs for trigger_onset
    kwargs = {'max_len_delete': delete_long_trigger}
    for tr in st:
        if tr.id not in trace_ids:
            msg = "At least one trace's ID was not found in the " + \
                  "trace ID list and was disregarded (%s)" % tr.id
            warnings.warn(msg, UserWarning)
            continue
        if trigger_type is not None:
            tr.trigger(trigger_type, **options)
            cfts.append(tr)

        kwargs['max_len'] = int(max_trigger_length * tr.stats.sampling_rate +
                                0.5)
        tmp_triggers = trigger_onset(tr.data, thr_on, thr_off, **kwargs)
        for on, off in tmp_triggers:
            try:
                cft_peak = tr.data[on:off].max()
                cft_std = tr.data[on:off].std()
            except ValueError:
                cft_peak = tr.data[on]
                cft_std = 0
            on = tr.stats.starttime + float(on) / tr.stats.sampling_rate
            off = tr.stats.starttime + float(off) / tr.stats.sampling_rate
            triggers.append(
                (on.timestamp, off.timestamp, tr.id, cft_peak, cft_std))
    triggers.sort()

    # the coincidence triggering and coincidence sum computation
    coincidence_triggers = []
    last_off_time = 0.0
    while triggers:
        # remove first trigger from list and look for overlaps
        on, off, tr_id, cft_peak, cft_std = triggers.pop(0)
        sta = tr_id.split(".")[1]
        event = {}
        event['time'] = UTCDateTime(on)
        event['stations'] = [tr_id.split(".")[1]]
        event['trace_ids'] = [tr_id]
        event['coincidence_sum'] = float(trace_ids[tr_id])
        event['similarity'] = {}
        if details:
            event['cft_peaks'] = [cft_peak]
            event['cft_stds'] = [cft_std]
        # evaluate maximum similarity for station if event templates were
        # provided
        templates = event_templates.get(sta)
        if templates:
            event['similarity'][sta] = \
                templates_max_similarity(stream, event['time'], templates)
        # compile the list of stations that overlap with the current trigger
        for trigger in triggers:
            tmp_on, tmp_off, tmp_tr_id, tmp_cft_peak, tmp_cft_std = trigger
            tmp_sta = tmp_tr_id.split(".")[1]
            # skip retriggering of already present station in current
            # coincidence trigger
            if tmp_tr_id in event['trace_ids']:
                continue
            # check for overlapping trigger,
            # break if there is a gap in between the two triggers
            if tmp_on > off + trigger_off_extension:
                break
            event['stations'].append(tmp_sta)
            event['trace_ids'].append(tmp_tr_id)
            event['coincidence_sum'] += trace_ids[tmp_tr_id]
            if details:
                event['cft_peaks'].append(tmp_cft_peak)
                event['cft_stds'].append(tmp_cft_std)
            # allow sets of triggers that overlap only on subsets of all
            # stations (e.g. A overlaps with B and B overlaps w/ C => ABC)
            off = max(off, tmp_off)
            # evaluate maximum similarity for station if event templates were
            # provided
            templates = event_templates.get(tmp_sta)
            if templates:
                event['similarity'][tmp_sta] = \
                    templates_max_similarity(stream, event['time'], templates)
        # skip if both coincidence sum and similarity thresholds are not met
        if event['coincidence_sum'] < thr_coincidence_sum:
            if not event['similarity']:
                continue
            elif not any([
                    val > similarity_threshold[_s]
                    for _s, val in event['similarity'].items()
            ]):
                continue
        # skip coincidence trigger if it is just a subset of the previous
        # (determined by a shared off-time, this is a bit sloppy)
        if off <= last_off_time:
            continue
        event['duration'] = off - on
        if details:
            weights = np.array([trace_ids[i] for i in event['trace_ids']])
            weighted_values = np.array(event['cft_peaks']) * weights
            event['cft_peak_wmean'] = weighted_values.sum() / weights.sum()
            weighted_values = np.array(event['cft_stds']) * weights
            event['cft_std_wmean'] = weighted_values.sum() / weights.sum()
        coincidence_triggers.append(event)
        last_off_time = off
    return cfts, coincidence_triggers
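
A usage sketch for this variant, assuming the function above and its imports are in scope; the thresholds and STA/LTA windows are illustrative. Unlike stock ObsPy, it returns the characteristic functions alongside the trigger list.

from obspy import read

st = read()  # ObsPy's bundled demo event: three channels of one station
cfts, trig_list = coincidence_trigger('recstalta', 3.5, 1.0, st, 2,
                                      sta=0.5, lta=10)
for ev in trig_list:
    print(ev['time'], ev['coincidence_sum'], ev['trace_ids'])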
Example #23
def get_PGMs(tr,args):

   ta = tr.copy()
   ts = tr.copy()


   for i in range(len(ta)):

      m_dis = 0
      m_vel = 0
      m_acc = 0


      #### Displacement
      if abs(max(ta[i])) >= abs(min(ta[i])):
         m_dis=abs(max(ta[i]))
      else:
         m_dis=abs(min(ta[i]))
         
      #### Velocity
      ta[i].data = np.gradient(ta[i].data,ta[i].stats['delta'])
      if abs(max(ta[i])) >= abs(min(ta[i])):
         m_vel=abs(max(ta[i]))
      else:
         m_vel=abs(min(ta[i]))
      ts[i].data = ta[i].data
      
      #### Acceleration
      ta[i].data = np.gradient(ta[i].data,ta[i].stats['delta'])
      if abs(max(ta[i])) >= abs(min(ta[i])):
         m_acc=abs(max(ta[i]))
      else:
         m_acc=abs(min(ta[i]))

      #store obtained pgms
      tr[i].stats['max_dis']  = m_dis
      tr[i].stats['max_vel']  = m_vel
      tr[i].stats['max_acc']  = m_acc

   # define vectors for spectral acceleration (spa), period (per) and %g (ges)
   sa = args.sa.split(' ')
   spa = [0] * (len(sa) - 1)
   per = [0] * (len(sa) - 1)
   ges = [0] * (len(sa) - 1)


   # for each value of sa, convolve with the response of a pendulum
   # (sa[0] is the damping; sa[1:] are corner frequencies in Hz)
   for j in range(len(sa)):
      if j >= 1:
           tu = ta.copy()
           T = float(sa[j])
           D = float(sa[0])
           Ts = '%5.3f' % (1 / T)
           omega = (2 * 3.14159 * T)**2

           paz_sa = cornFreq2Paz(T, damp=D)
           paz_sa['sensitivity'] = omega
           paz_sa['zeros'] = []
           for n in range(len(tu)):
              tu[n].simulate(paz_remove=None, paz_simulate=paz_sa, taper=True,
                             simulate_sensitivity=True, taper_fraction=0.05)
           
           
           per[j-1] = Ts
           
           # now measure the peak response for each trace
           for i in range(len(tu)):
              if abs(max(tu[i])) >= abs(min(tu[i])):
                val=abs(max(tu[i]))
              else:
                val=abs(min(tu[i]))

              val = '%10.3e' % (val)
              # spectral acceleration is stored in standard units (m/s^2),
              # not in g; it can be converted later (e.g. for ShakeMap)
              tr[i]=UpdatePsaHeader(tr[i],j,val)

            
   for i in range(len(tr)):
     tr[i].stats['Tsa'] = per
     
   return tr
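
The peak-motion logic above reduces to differentiating the record with np.gradient and keeping the largest absolute amplitude at each stage; a standalone sketch on a synthetic displacement trace:

import numpy as np

delta = 0.01                              # sample spacing (s)
t = np.arange(0, 10, delta)
dis = 0.02 * np.sin(2 * np.pi * 0.5 * t)  # synthetic displacement (m)

vel = np.gradient(dis, delta)             # velocity (m/s)
acc = np.gradient(vel, delta)             # acceleration (m/s^2)

pgd = np.abs(dis).max()                   # peak ground displacement
pgv = np.abs(vel).max()                   # peak ground velocity
pga = np.abs(acc).max()                   # peak ground acceleration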
Example #24
def _getData(pick, fds, ic):
    if pick[0][0] == '#':  # this pick line is commented out
        return None
    net = pick[6]
    st = pick[7]
    ch = pick[8]
    time = UTCDateTime(float(pick[9]))

    starttime = time - 20
    endtime = time + 40

    if ch == '00T':

        baz = float(pick[14])
        # get all the horizontal short-period channels at loc '00' or ''
        # (assuming the 00 in 00T refers to the location code)
        try:

            inv = fds.get_stations(starttime, endtime, network=net, station=st)
            #find appropriate channels in the inventory (ASDF returns list, not inventory type)
            for chan in inv:
                if re.search('^[HBS].[N1]$', chan[3]):
                    break
            if not re.search('^[HBS].[N1]$', chan[3]):
                raise Exception('no appropriate horizontal channels found')

            stream = fds.get_waveforms(net, st, chan[2], chan[3], starttime,
                                       endtime)
            wf1 = stream[0]
            if wf1.stats['channel'][-1] == '1':
                wf2 = fds.get_waveforms(net, st, chan[2], chan[3][0:-1] + '2',
                                        starttime, endtime)[0]
                wfz = fds.get_waveforms(net, st, chan[2], chan[3][0:-1] + 'Z',
                                        starttime, endtime)[0]

                stream = Stream(traces=[wfz, wf1, wf2])
                #this is a necessary hack - ASDF inventories do not have orientation data for
                #the 1 and 2 channels so it is impossible to rotate to ZNE accurately. Therefore
                #we get station data off IRIS. All the OA installations have ZNE data provided,
                #so only the permanent stations need to rotate 12->NE, and being permanent they should
                #have metadata available through IRIS.
                IRISinv = ic.get_stations(network=net,
                                          station=st,
                                          channel=ch,
                                          level="channel")
                stream = stream.rotate(method="->ZNE", inventory=IRISinv)

            else:
                wf2 = fds.get_waveforms(net, st, chan[2], chan[3][0:-1] + 'E',
                                        starttime, endtime)[0]
                stream = Stream(traces=[wf1, wf2])
        except Exception as e:
            # handle missing data
            print(e, file=sys.stderr)
            stream = None

        wf = None
        if stream:
            try:
                stream = stream.rotate(method='NE->RT', back_azimuth=baz)
                for trywf in stream:
                    if trywf.stats['channel'][-1] == 'T':
                        wf = trywf
                        break
            except Exception as e:
                print(e, file=sys.stderr)
                wf = None

    else:
        try:
            #just grab the channel that the pick was on
            inv = fds.get_stations(starttime,
                                   endtime,
                                   network=net,
                                   station=st,
                                   channel=ch)
            loc = inv[0][2]
            wf = fds.get_waveforms(net, st, loc, ch, starttime, endtime)[0]
        except Exception as e:
            print(e, file=sys.stderr)
            wf = None
    if wf and len(wf) > 100:
        wf.resample(100)
        wf.detrend()
        wf.normalize()
        return wf.data
    else:
        print('Waveform not found. Skipping pick...', file=sys.stderr)
        return None
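
The NE->RT rotation used above is standard ObsPy; a self-contained sketch with synthetic traces standing in for the fetched waveforms (station id, sampling rate and back azimuth are placeholders):

import numpy as np
from obspy import Stream, Trace

header_n = {'network': 'AU', 'station': 'TEST', 'channel': 'BHN',
            'sampling_rate': 40.0}
wf_n = Trace(np.random.randn(4000), header=header_n)
wf_e = Trace(np.random.randn(4000), header=dict(header_n, channel='BHE'))

st_rt = Stream(traces=[wf_n, wf_e]).rotate(method='NE->RT', back_azimuth=152.0)
transverse = st_rt.select(component='T')[0]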
Example #25
 def requestWaveformTraces(self, requests):
     """
     Method for requesting data from WaveformLocation. Implement this to get waveform data.
     """
     stream = Stream()
     return stream
Example #26
import glob
from obspy import read
from obspy.core.util import AttribDict
from obspy.imaging.cm import obspy_sequential
from obspy.signal.invsim import corn_freq_2_paz
from obspy.signal.array_analysis import array_processing
import matplotlib.ticker as ticker

from obspy.core.stream import Stream

time_min = -40
time_max = 80

data_dir = '/raid3/zl382/Data/20161225/fk_analysis/az_330.0dist_123.0/'
seislist = glob.glob(data_dir + '*PICKLE')

#Load data
st = Stream()
for i, seisname in enumerate(seislist):
    seis = read(seisname, format='PICKLE')
    st += seis.select(channel='BHT')
    st[i].stats.coordinates = AttribDict({
        'latitude': seis[0].stats['stla'],
        'elevation': seis[0].stats['stelv'] / 1000,
        'longitude': seis[0].stats['stlo']
    })
st.resample(10)
# Execute array_processing
Sdifftime = seis[0].stats.traveltimes['Sdiff'] or seis[0].stats.traveltimes['S']
stime = st[0].stats['eventtime'] + Sdifftime - 40
etime = st[0].stats['eventtime'] + Sdifftime + 80
zerotime = st[0].stats['eventtime'] + Sdifftime
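
The snippet stops just before the call it sets up. A sketch of how array_processing is typically invoked, following the ObsPy beamforming example; the slowness grid and frequency band below are illustrative, not from the original script:

kwargs = dict(
    # slowness grid: X min/max, Y min/max, step (s/km)
    sll_x=-3.0, slm_x=3.0, sll_y=-3.0, slm_y=3.0, sl_s=0.03,
    # sliding-window length (s) and step as a fraction of the window
    win_len=1.0, win_frac=0.05,
    # frequency band and processing thresholds
    frqlow=1.0, frqhigh=8.0, prewhiten=0,
    semb_thres=-1e9, vel_thres=-1e9,
    timestamp='mlabday', stime=stime, etime=etime)
out = array_processing(st, **kwargs)
# columns of out: timestamp, rel. power, abs. power, back azimuth, slowness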
Example #27
File: trigger.py Project: jinwuLi/REDPy
def trigger(st, stC, rtable, opt):

    """
    Run triggering algorithm on a stream of data.

    st: ObsPy Stream of data
    stC: ObsPy Stream of pre-processed data used for triggering
    rtable: Repeater table containing the reference time of the previous trigger in samples
    opt: Options object describing station/run parameters

    Returns triggered traces as an ObsPy Stream and updates ptime for the next run
    """
    
    tr = st[0]
    t = tr.stats.starttime

    cft = coincidence_trigger(opt.trigalg, opt.trigon, opt.trigoff, stC, opt.nstaC,
        sta=opt.swin, lta=opt.lwin, details=True)
            
    if len(cft) > 0:
        
        ind = 0
        
        # Slice out the data from st and save the maximum STA/LTA ratio value for
        # use in orphan expiration
        
        # Convert ptime from time of last trigger to seconds before start time
        if rtable.attrs.ptime:
            ptime = (UTCDateTime(rtable.attrs.ptime) - t)
        else:
            ptime = -opt.mintrig
                
        for n in range(len(cft)):
                    
            ttime = cft[n]['time'] # This is a UTCDateTime, not samples
            
            if (ttime >= t + opt.atrig) and (ttime >= t + ptime +
                opt.mintrig) and (ttime < t + len(tr.data)/opt.samprate -
                2*opt.atrig):
                
                ptime = ttime - t
                
                # Cut out and append all data to first trace              
                tmp = st.slice(ttime - opt.ptrig, ttime + opt.atrig)
                ttmp = tmp.copy()
                ttmp = ttmp.trim(ttime - opt.ptrig, ttime + opt.atrig + 0.05, pad=True,
                    fill_value=0)
                ttmp[0].data = ttmp[0].data[0:opt.wshape] - np.mean(
                    ttmp[0].data[0:opt.wshape])
                for s in range(1,len(ttmp)):
                    ttmp[0].data = np.append(ttmp[0].data, ttmp[s].data[
                        0:opt.wshape] - np.mean(ttmp[s].data[0:opt.wshape]))
                ttmp[0].stats.maxratio = np.max(cft[n]['cft_peaks'])
                if ind == 0:
                    trigs = Stream(ttmp[0])
                    ind = ind + 1
                else:
                    trigs.append(ttmp[0])

        if ind == 0:
            return []
        else:
            rtable.attrs.ptime = (t + ptime).isoformat()
            return trigs
    else:
        return []
Example #28
        newIa, newNIa = arias_intensity.get_arias_intensity(acc, 0.01, starttime)
        
        trace.stats.Ia = Ia
        trace.stats.NIa = NIa
        trace.stats.maxIa = np.amax(Ia)
        trace.stats.PIa = newIa
        trace.stats.PNIa = newNIa
        trace.stats.maxPIa = np.amax(newIa)
        
        time1 = arias_intensity.get_time_from_percent(NIa, 0.05, dt)
        time2 = arias_intensity.get_time_from_percent(NIa, 0.95, dt)
        trace.stats.arias5 = time1
        trace.stats.arias95 = time2    

# Gather all NS components into one stream and sort by distance
hcomp = Stream()
for sta in stations:
    newtrace = sta.select(channel='NS')
    hcomp += newtrace

sorted_hcomp = hcomp.sort(keys=['distkm'])

# Plot ratio of arias with starttime to full arias
dist_source = []
maxIa = []
maxPIa = []
duration = []
ratio = []
for trace in sorted_hcomp:
    dist = trace.stats.distkm
    arias = trace.stats.maxIa
Example #29
File: trigger.py Project: jinwuLi/REDPy
def dataClean(alltrigs, opt, flag=1):

    """
    Examine triggers and weed out spikes and calibration pulses using kurtosis and
    outlier ratios
    
    alltrigs: triggers output from triggering
    opt: Options object from config
    flag: 1 if defining window to check, 0 if want to check whole waveform for spikes
        (note that different threshold values should be used for different window lengths)
    
    Returns good trigs (trigs) and several junk types (junk, junkFI, junkKurt)
    """
    
    trigs = Stream()
    junkFI = Stream()
    junkKurt = Stream()
    junk = Stream()
    for i in range(len(alltrigs)):
            
        njunk = 0
        ntele = 0
        
        for n in range(opt.nsta):
            
            dat = alltrigs[i].data[n*opt.wshape:(n+1)*opt.wshape]
            if flag == 1:
                datcut = dat[int((opt.ptrig-opt.kurtwin/2)*opt.samprate):
                             int((opt.ptrig+opt.kurtwin/2)*opt.samprate)]
            else:
                datcut = dat
            
            if np.sum(np.abs(dat))!=0.0:
                # Calculate kurtosis in window
                k = stats.kurtosis(datcut)
                # Compute kurtosis of frequency amplitude spectrum next
                datf = np.absolute(fft(dat))
                kf = stats.kurtosis(datf)
                # Calculate outlier ratio using z = (data - median) / MAD
                mad = np.nanmedian(np.absolute(dat - np.nanmedian(dat)))
                z = (dat - np.median(dat)) / mad
                # Outliers have z > 4.45
                orm = len(z[z > 4.45]) / float(len(z))
            
                if k >= opt.kurtmax or orm >= opt.oratiomax or kf >= opt.kurtfmax:
                    njunk += 1
                
                winstart = int(opt.ptrig*opt.samprate - opt.winlen/10)
                winend = int(opt.ptrig*opt.samprate - opt.winlen/10 + opt.winlen)
                fftwin = np.reshape(fft(dat[winstart:winend]),(opt.winlen,))
                if np.median(np.abs(dat[winstart:winend]))!=0:
                    fi = np.log10(np.mean(np.abs(np.real(
                        fftwin[int(opt.fiupmin*opt.winlen/opt.samprate):int(
                        opt.fiupmax*opt.winlen/opt.samprate)])))/np.mean(np.abs(np.real(
                        fftwin[int(opt.filomin*opt.winlen/opt.samprate):int(
                        opt.filomax*opt.winlen/opt.samprate)]))))
                    if fi < opt.telefi:
                        ntele += 1
        
        # Allow if there are enough good stations to correlate
        if njunk <= (opt.nsta-opt.ncor) and ntele <= opt.teleok:
            trigs.append(alltrigs[i])
        else:
            if njunk > 0:
                if ntele > 0:
                    junk.append(alltrigs[i])
                else:
                    junkKurt.append(alltrigs[i])
            else:
                junkFI.append(alltrigs[i])
                
    return trigs, junk, junkFI, junkKurt
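
The two discriminants above are easy to exercise in isolation: kurtosis reacts strongly to an impulsive glitch, and the MAD-based z-score counts samples far from the median. A synthetic check (the 4.45 cutoff mirrors the comment in the code above):

import numpy as np
from scipy import stats

rng = np.random.default_rng(0)
dat = rng.standard_normal(2000)
dat[1000] = 50.0                          # a single spike, like a cal pulse

k = stats.kurtosis(dat)                   # large for the spiky trace
mad = np.nanmedian(np.abs(dat - np.nanmedian(dat)))
z = (dat - np.median(dat)) / mad
orm = len(z[z > 4.45]) / float(len(z))    # outlier ratio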
Example #30
    def love_pick(self,
                  T_trace,
                  la_s,
                  lo_s,
                  depth,
                  save_directory,
                  time_at_rec,
                  npts,
                  filter=True,
                  plot_modus=False):
        if plot_modus:
            dir_L = save_directory + '/Love_waves'
            if not os.path.exists(dir_L):
                os.makedirs(dir_L)
        Love_st = Stream()

        evla = la_s
        evlo = lo_s

        rec = instaseis.Receiver(latitude=self.prior['la_r'],
                                 longitude=self.prior['lo_r'])

        dist, az, baz = gps2dist_azimuth(lat1=evla,
                                         lon1=evlo,
                                         lat2=self.prior['la_r'],
                                         lon2=self.prior['lo_r'],
                                         a=self.prior['radius'],
                                         f=0)

        # Use the T component, which carries the strongest Love-wave signal:
        T_comp = T_trace.copy()

        if plot_modus:
            T_comp.plot(outfile=dir_L + '/sw_entire_waveform.pdf')
        phases = self.get_L_phases(time_at_rec)

        for i in range(len(phases)):
            if plot_modus:
                dir_phases = dir_L + '/%s' % phases[i]['name']
                if not os.path.exists(dir_phases):
                    os.makedirs(dir_phases)
            trial = T_trace.copy()
            if filter:
                trial.detrend(type="demean")
                trial.filter('highpass',
                             freq=phases[i]['fmin'],
                             zerophase=True)
                trial.filter('lowpass', freq=phases[i]['fmax'], zerophase=True)
                trial.detrend()

            if plot_modus:
                start_vline = int(
                    (phases[i]['starttime'](dist, depth).timestamp -
                     time_at_rec.timestamp) / trial.stats.delta)
                end_vline = int((phases[i]['endtime'](dist, depth).timestamp -
                                 time_at_rec.timestamp) / trial.stats.delta)
                plt.figure(1)
                ax = plt.subplot(111)
                plt.plot(trial.data, alpha=0.5)
                ymin, ymax = ax.get_ylim()
                plt.vlines([start_vline, end_vline], ymin, ymax)
                plt.xlabel(time_at_rec.strftime('%Y-%m-%dT%H:%M:%S + sec'))
                plt.tight_layout()
                plt.savefig(dir_phases + '/sw_with_Love_windows.pdf')
                plt.close()

            if filter:
                trial.detrend(type="demean")
                env = envelope(trial.data)
                trial.data = env
                trial.trim(starttime=phases[i]['starttime'](dist, depth),
                           endtime=phases[i]['endtime'](dist, depth))
            else:
                env = trial.data
            if plot_modus:
                plt.figure(2)
                plt.plot(trial, label='%s' % phases[i]['name'])
                plt.legend()
                plt.tight_layout()
                plt.savefig(dir_phases +
                            '/Love_envelope_filter_%s.pdf' % phases[i]['name'])
                plt.close()

            zero_trace = Trace(np.zeros(npts),
                               header={
                                   "starttime": phases[i]['starttime'](dist,
                                                                       depth),
                                   'delta': trial.meta.delta,
                                   "station": trial.meta.station,
                                   "network": trial.meta.network,
                                   "location": trial.meta.location,
                                   "channel": phases[i]['name']
                               })

            total_trace = zero_trace.__add__(trial,
                                             method=0,
                                             interpolation_samples=0,
                                             fill_value=trial.data,
                                             sanity_checks=False)

            Love_st.append(total_trace)
        if plot_modus:
            plt.figure(3)
            for phase_trace in Love_st:
                plt.plot(phase_trace.data,
                         label='%s' % phase_trace.meta.channel)
            plt.legend()
            plt.tight_layout()
            plt.savefig(dir_L + '/diff_Love_freq.pdf')
            plt.close()
        return Love_st