Example #1
import glob
import os
import sys

import numpy as np
from obspy import Stream, Trace, UTCDateTime


def axisem2mseed(path):
    """
    Convert .dat files into MSEED format.
    """
    global test_param

    if not os.path.isdir(os.path.join(path, 'MSEED')):
        os.mkdir(os.path.join(path, 'MSEED'))
    else:
        print('Following directory already exists:')
        print(os.path.join(path, 'MSEED'))
        sys.exit()

    t = UTCDateTime(0)
    traces = []

    for file in glob.iglob(os.path.join(path, '*.dat')):
        stationID = file.split('/')[-1].split('_')[0]
        networkID = file.split('/')[-1].split('_')[1]
        chan = file.split('/')[-1].split('_')[-1].split('.')[0]
        if chan == 'E': chan = 'BHE'
        elif chan == 'N': chan = 'BHN'
        elif chan == 'Z': chan = 'BHZ'
        try:
            dat = np.loadtxt(file)
            npts = len(dat[:,0])
            stats = {'network': networkID,
                     'station': stationID,
                     'location': '',
                     'channel': chan,
                     'npts': npts,
                     'sampling_rate': (npts - 1.)/(dat[-1,0] - dat[0,0]),
                     'starttime': t + dat[0,0],
                     'mseed' : {'dataquality': 'D'}}
            st = Stream(Trace(data=dat[:,1], header=stats))
            if test_param['convSTF'] == 'Y':
                sigma =  test_param['halfduration'] / np.sqrt(2.) / 3.5
                convSTF(st, sigma=sigma)
            if test_param['filter'] == 'Y':
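                # the same two-corner pass is applied repeatedly to
                # steepen the effective filter rolloff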
                st.filter('lowpass', freq=test_param['fmax'], corners=2)
                st.filter('lowpass', freq=test_param['fmax'], corners=2)
                st.filter('lowpass', freq=test_param['fmax'], corners=2)
                st.filter('lowpass', freq=test_param['fmax'], corners=2)
                st.filter('highpass', freq=test_param['fmin'], corners=2)
                st.filter('highpass', freq=test_param['fmin'], corners=2)
            fname =  os.path.join(path, 'MSEED', 'dis.' + stationID + '..' + chan)
            st.write(fname, format='MSEED')
        except Exception as e:
            print(e)
            print(networkID + '.' + stationID + '.' + chan + '.mseed')
            print('-------------------------------------------------')
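
A minimal usage sketch for the function above. `test_param` is the module-level configuration dict the function reads via `global`, and `convSTF` is a source-time-function convolution helper from the original module (not shown here); the keys and values below are illustrative assumptions:

test_param = {
    'convSTF': 'N',        # 'Y' would require the external convSTF helper
    'filter': 'Y',
    'fmin': 0.01,          # Hz, highpass corner
    'fmax': 1.0,           # Hz, lowpass corner
    'halfduration': 5.0,   # s, only used when convSTF == 'Y'
}

axisem2mseed('./SEISMOGRAMS')  # converts ./SEISMOGRAMS/*.dat into MSEED/
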
Example #2
                    st_temp += read(file)

    # load 24h continuous waveforms only if template exists
    # iterate over a copy so traces can be removed from st_temp safely
    for tt in list(st_temp):
        station = tt.stats.station
        channel = tt.stats.channel
        file1 = cont_dir + sday + "." + station + "." + channel
        # print(" file1 ===", file1)
        if os.path.isfile(file1):
            st_cont += read(file1)
        else:
            # remove from the template stream if continuous not exists
            st_temp.remove(tt)

    st_cont.filter("bandpass",
                   freqmin=bandpass[0],
                   freqmax=bandpass[1],
                   zerophase=True)

    # define variables
    # st1_temp = st_temp.select(channel=channel[0])
    tt = Trace()
    tc = Trace()
    count = 0
    nmin = 0

    npanels = len(st_temp)
    nfile = len(st_temp)
    Tstart = np.empty(nfile)
    Tend = np.empty(nfile)
    tdif = np.empty(nfile)
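
The `bandpass` call above is ObsPy's zero-phase Butterworth filter. A self-contained sketch of the same call on a synthetic trace:

import numpy as np
from obspy import Stream, Trace

# one minute of random noise sampled at 100 Hz
tr = Trace(data=np.random.randn(6000))
tr.stats.sampling_rate = 100.0
st = Stream([tr])

# zerophase=True filters forward and backward: no phase distortion,
# at the cost of doubling the effective number of corners
st.filter("bandpass", freqmin=2.0, freqmax=8.0, corners=4, zerophase=True)
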
Example #3
def main():

    print()
    print("#########################################")
    print("#        __                 _     _     #")
    print("#  _ __ / _|_ __  _   _    | |__ | | __ #")
    print("# | '__| |_| '_ \| | | |   | '_ \| |/ / #")
    print("# | |  |  _| |_) | |_| |   | | | |   <  #")
    print("# |_|  |_| | .__/ \__, |___|_| |_|_|\_\ #")
    print("#          |_|    |___/_____|           #")
    print("#                                       #")
    print("#########################################")
    print()

    # Run Input Parser
    args = arguments.get_hk_arguments()

    # Load Database
    db = stdb.io.load_db(fname=args.indb)

    # Construct station key loop
    allkeys = sorted(db.keys())

    # Extract key subset
    if len(args.stkeys) > 0:
        stkeys = []
        for skey in args.stkeys:
            stkeys.extend([s for s in allkeys if skey in s])
    else:
        stkeys = sorted(db.keys())

    # Loop over station keys
    for stkey in list(stkeys):

        # Extract station information from dictionary
        sta = db[stkey]

        # Define path to see if it exists
        if args.phase in ['P', 'PP', 'allP']:
            datapath = Path('P_DATA') / stkey
        elif args.phase in ['S', 'SKS', 'allS']:
            datapath = Path('S_DATA') / stkey
        if not datapath.is_dir():
            print('Path to ' + str(datapath) + " doesn't exist - continuing")
            continue

        # Define save path
        if args.save:
            savepath = Path('HK_DATA') / stkey
            if not savepath.is_dir():
                print('Path to ' + str(savepath) +
                      " doesn't exist - creating it")
                savepath.mkdir(parents=True)

        # Get search start time
        if args.startT is None:
            tstart = sta.startdate
        else:
            tstart = args.startT

        # Get search end time
        if args.endT is None:
            tend = sta.enddate
        else:
            tend = args.endT

        if tstart > sta.enddate or tend < sta.startdate:
            continue

        # Temporary print locations
        tlocs = sta.location
        if len(tlocs) == 0:
            tlocs = ['']
        for il in range(0, len(tlocs)):
            if len(tlocs[il]) == 0:
                tlocs[il] = "--"
        sta.location = tlocs

        # Update Display
        print(" ")
        print(" ")
        print("|===============================================|")
        print("|===============================================|")
        print("|                   {0:>8s}                    |".format(
            sta.station))
        print("|===============================================|")
        print("|===============================================|")
        print("|  Station: {0:>2s}.{1:5s}                            |".format(
            sta.network, sta.station))
        print("|      Channel: {0:2s}; Locations: {1:15s}  |".format(
            sta.channel, ",".join(tlocs)))
        print("|      Lon: {0:7.2f}; Lat: {1:6.2f}                |".format(
            sta.longitude, sta.latitude))
        print("|      Start time: {0:19s}          |".format(
            sta.startdate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|      End time:   {0:19s}          |".format(
            sta.enddate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|-----------------------------------------------|")

        rfRstream = Stream()

        datafiles = [x for x in datapath.iterdir() if x.is_dir()]
        for folder in datafiles:

            # Skip hidden folders
            if folder.name.startswith('.'):
                continue

            date = folder.name.split('_')[0]
            year = date[0:4]
            month = date[4:6]
            day = date[6:8]
            dateUTC = UTCDateTime(year + '-' + month + '-' + day)

            if dateUTC > tstart and dateUTC < tend:

                # Load meta data
                metafile = folder / "Meta_Data.pkl"
                if not metafile.is_file():
                    continue
                meta = pickle.load(open(metafile, 'rb'))

                # Skip data not in list of phases
                if meta.phase not in args.listphase:
                    continue

                # QC Thresholding
                if meta.snrh < args.snrh:
                    continue
                if meta.snr < args.snr:
                    continue
                if meta.cc < args.cc:
                    continue

                # # Check bounds on data
                # if meta.slow < args.slowbound[0] and meta.slow > args.slowbound[1]:
                #     continue
                # if meta.baz < args.bazbound[0] and meta.baz > args.bazbound[1]:
                #     continue

                # If everything passed, load the RF data
                filename = folder / "RF_Data.pkl"
                if filename.is_file():
                    file = open(filename, "rb")
                    rfdata = pickle.load(file)
                    rfRstream.append(rfdata[1])
                    file.close()
                    # flag traces with unexpected length
                    if rfdata[0].stats.npts != 1451:
                        print(folder)

        if len(rfRstream) == 0:
            continue

        if args.no_outl:
            t1 = 0.
            t2 = 30.

            varR = []
            for i in range(len(rfRstream)):
                taxis = rfRstream[i].stats.taxis
                tselect = (taxis > t1) & (taxis < t2)
                varR.append(np.var(rfRstream[i].data[tselect]))
            varR = np.array(varR)

            # Remove outliers wrt variance within time range
            medvarR = np.median(varR)
            madvarR = 1.4826 * np.median(np.abs(varR - medvarR))
            robustR = np.abs((varR - medvarR) / madvarR)
            outliersR = np.arange(len(rfRstream))[robustR > 2.5]
            for i in outliersR[::-1]:
                rfRstream.remove(rfRstream[i])

        print('')
        print("Number of radial RF data: " + str(len(rfRstream)))
        print('')

        # Try binning if specified
        if args.calc_dip:
            rf_tmp = binning.bin_baz_slow(rfRstream,
                                          nbaz=args.nbaz + 1,
                                          nslow=args.nslow + 1,
                                          pws=args.pws)
            rfRstream = rf_tmp[0]
        else:
            rf_tmp = binning.bin(rfRstream,
                                 typ='slow',
                                 nbin=args.nslow + 1,
                                 pws=args.pws)
            rfRstream = rf_tmp[0]

        # Get a copy of the radial component and filter
        if args.copy:
            rfRstream_copy = rfRstream.copy()
            rfRstream_copy.filter('bandpass',
                                  freqmin=args.bp_copy[0],
                                  freqmax=args.bp_copy[1],
                                  corners=2,
                                  zerophase=True)

        # Check bin counts:
        # iterate over a copy so traces can be removed safely
        for tr in list(rfRstream):
            if tr.stats.nbin < args.binlim:
                rfRstream.remove(tr)

        # Continue if stream is too short
        if len(rfRstream) < 5:
            continue

        if args.save_plot and not Path('HK_PLOTS').is_dir():
            Path('HK_PLOTS').mkdir(parents=True)

        print('')
        print("Number of radial RF bins: " + str(len(rfRstream)))
        print('')

        # Filter original stream
        rfRstream.filter('bandpass',
                         freqmin=args.bp[0],
                         freqmax=args.bp[1],
                         corners=2,
                         zerophase=True)

        # Initialize the HkStack object
        # rfRstream_copy only exists if args.copy was set
        if args.copy:
            hkstack = HkStack(rfRstream,
                              rfV2=rfRstream_copy,
                              strike=args.strike,
                              dip=args.dip,
                              vp=args.vp)
        else:
            hkstack = HkStack(rfRstream,
                              strike=args.strike,
                              dip=args.dip,
                              vp=args.vp)

        # Update attributes
        hkstack.hbound = args.hbound
        hkstack.kbound = args.kbound
        hkstack.dh = args.dh
        hkstack.dk = args.dk
        hkstack.weights = args.weights

        # Stack with or without dip
        if args.calc_dip:
            hkstack.stack_dip()
        else:
            hkstack.stack()

        # Average stacks
        hkstack.average(typ=args.typ)

        if args.plot:
            hkstack.plot(args.save_plot, args.title, args.form)

        if args.save:
            filename = savepath / (hkstack.rfV1[0].stats.station +
                                   ".hkstack." + args.typ + ".pkl")

            hkstack.save(file=filename)
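
The outlier rejection above (and the analogous step in Example #5) builds a robust z-score from the median absolute deviation. A minimal sketch of the same step as a standalone helper:

import numpy as np

def mad_outlier_indices(values, thresh=2.5):
    """Indices of outliers by robust (MAD-based) z-score.

    The factor 1.4826 rescales the median absolute deviation so that
    it estimates the standard deviation for normally distributed data.
    """
    values = np.asarray(values)
    med = np.median(values)
    mad = 1.4826 * np.median(np.abs(values - med))
    return np.arange(len(values))[np.abs((values - med) / mad) > thresh]
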
Example #4
t2 = t + 4 * 3600

stations = ["AIGLE", "SENIN", "DIX", "LAUCH", "MMK", "SIMPL"]
st = Stream()

for station in stations:
    try:
        tmp = client.getWaveform("CH", station, "", "[EH]HZ", t, t2,
                                 metadata=True)
    except Exception:
        print(station, "---")
        continue
    st += tmp

st.taper(max_percentage=0.05, type="hann")  # max_percentage is required in current ObsPy
st.filter("bandpass", freqmin=1, freqmax=20)
triglist = coincidenceTrigger("recstalta", 10, 2, st, 4, sta=0.5, lta=10)
print(len(triglist), "events triggered.")

for trig in triglist:
    closest_sta = trig['stations'][0]
    tr = st.select(station=closest_sta)[0]
    trig['latitude'] = tr.stats.coordinates.latitude
    trig['longitude'] = tr.stats.coordinates.longitude

paz_wa = {'sensitivity': 2800, 'zeros': [0j], 'gain': 1,
          'poles': [-6.2832-4.7124j, -6.2832+4.7124j]}

for trig in triglist:
    t = trig['time']
    print "#" * 80
Example #5
def main():

    print()
    print(
        "################################################################################"
    )
    print(
        "#        __                 _                                      _           #"
    )
    print(
        "#  _ __ / _|_ __  _   _    | |__   __ _ _ __ _ __ ___   ___  _ __ (_) ___ ___  #"
    )
    print(
        "# | '__| |_| '_ \| | | |   | '_ \ / _` | '__| '_ ` _ \ / _ \| '_ \| |/ __/ __| #"
    )
    print(
        "# | |  |  _| |_) | |_| |   | | | | (_| | |  | | | | | | (_) | | | | | (__\__ \ #"
    )
    print(
        "# |_|  |_| | .__/ \__, |___|_| |_|\__,_|_|  |_| |_| |_|\___/|_| |_|_|\___|___/ #"
    )
    print(
        "#          |_|    |___/_____|                                                  #"
    )
    print(
        "#                                                                              #"
    )
    print(
        "################################################################################"
    )
    print()

    # Run Input Parser
    args = arguments.get_harmonics_arguments()

    # Load Database
    db = stdb.io.load_db(fname=args.indb)

    # Construct station key loop
    allkeys = sorted(db.keys())

    # Extract key subset
    if len(args.stkeys) > 0:
        stkeys = []
        for skey in args.stkeys:
            stkeys.extend([s for s in allkeys if skey in s])
    else:
        stkeys = sorted(db.keys())

    # Loop over station keys
    for stkey in list(stkeys):

        # Extract station information from dictionary
        sta = db[stkey]

        # Define path to see if it exists
        datapath = Path('DATA') / stkey
        if not datapath.is_dir():
            raise Exception('Path to ' + str(datapath) +
                            " doesn't exist - aborting")

        # Get search start time
        if args.startT is None:
            tstart = sta.startdate
        else:
            tstart = args.startT

        # Get search end time
        if args.endT is None:
            tend = sta.enddate
        else:
            tend = args.endT

        if tstart > sta.enddate or tend < sta.startdate:
            continue

        # Temporary print locations
        tlocs = sta.location
        if len(tlocs) == 0:
            tlocs = ['']
        for il in range(0, len(tlocs)):
            if len(tlocs[il]) == 0:
                tlocs[il] = "--"
        sta.location = tlocs

        # Update Display
        print(" ")
        print(" ")
        print("|===============================================|")
        print("|===============================================|")
        print("|                   {0:>8s}                    |".format(
            sta.station))
        print("|===============================================|")
        print("|===============================================|")
        print("|  Station: {0:>2s}.{1:5s}                            |".format(
            sta.network, sta.station))
        print("|      Channel: {0:2s}; Locations: {1:15s}  |".format(
            sta.channel, ",".join(tlocs)))
        print("|      Lon: {0:7.2f}; Lat: {1:6.2f}                |".format(
            sta.longitude, sta.latitude))
        print("|      Start time: {0:19s}          |".format(
            sta.startdate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|      End time:   {0:19s}          |".format(
            sta.enddate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|-----------------------------------------------|")

        rfRstream = Stream()
        rfTstream = Stream()

        datafiles = [x for x in datapath.iterdir() if x.is_dir()]
        for folder in datafiles:

            # Skip hidden folders
            if folder.name.startswith('.'):
                continue

            date = folder.name.split('_')[0]
            year = date[0:4]
            month = date[4:6]
            day = date[6:8]
            dateUTC = UTCDateTime(year + '-' + month + '-' + day)

            if dateUTC > tstart and dateUTC < tend:

                filename = folder / "RF_Data.pkl"
                if filename.is_file():
                    file = open(filename, "rb")
                    rfdata = pickle.load(file)
                    if rfdata[0].stats.snrh > args.snrh and \
                            rfdata[0].stats.snr > args.snr and \
                            rfdata[0].stats.cc > args.cc:

                        rfRstream.append(rfdata[1])
                        rfTstream.append(rfdata[2])

                    file.close()

            else:
                continue

        if args.no_outl:
            # Remove outliers wrt variance
            varR = np.array([np.var(tr.data) for tr in rfRstream])

            # Calculate outliers
            medvarR = np.median(varR)
            madvarR = 1.4826 * np.median(np.abs(varR - medvarR))
            robustR = np.abs((varR - medvarR) / madvarR)
            outliersR = np.arange(len(rfRstream))[robustR > 2.]
            for i in outliersR[::-1]:
                rfRstream.remove(rfRstream[i])
                rfTstream.remove(rfTstream[i])

            # Do the same for transverse
            varT = np.array([np.var(tr.data) for tr in rfTstream])
            medvarT = np.median(varT)
            madvarT = 1.4826 * np.median(np.abs(varT - medvarT))
            robustT = np.abs((varT - medvarT) / madvarT)
            outliersT = np.arange(len(rfTstream))[robustT > 2.]
            for i in outliersT[::-1]:
                rfRstream.remove(rfRstream[i])
                rfTstream.remove(rfTstream[i])

        # Try binning if specified
        if args.nbin is not None:
            rf_tmp = binning.bin(rfRstream,
                                 rfTstream,
                                 typ='baz',
                                 nbin=args.nbin + 1)
            rfRstream = rf_tmp[0]
            rfTstream = rf_tmp[1]

        # Filter original streams
        rfRstream.filter('bandpass',
                         freqmin=args.bp[0],
                         freqmax=args.bp[1],
                         corners=2,
                         zerophase=True)
        rfTstream.filter('bandpass',
                         freqmin=args.bp[0],
                         freqmax=args.bp[1],
                         corners=2,
                         zerophase=True)

        # Initialize the HkStack object
        harmonics = Harmonics(rfRstream, rfTstream)

        # Stack with or without dip
        if args.find_azim:
            harmonics.dcomp_find_azim(xmin=args.trange[0], xmax=args.trange[1])
            print("Optimal azimuth for trange between " + str(args.trange[0]) +
                  " and " + str(args.trange[1]) + "is: " + str(harmonics.azim))
        else:
            harmonics.dcomp_fix_azim(azim=args.azim)

        if args.plot:
            harmonics.plot(args.ymax, args.scale, args.save_plot, args.title,
                           args.form)

        if args.save:
            filename = datapath / (harmonics.hstream[0].stats.station +
                                   ".harmonics.pkl")
            # signature assumed to mirror hkstack.save(file=...) in Example #3
            harmonics.save(file=filename)
Example #6
if exceptions:
    summary.append("#" * 33 + " Exceptions  " + "#" * 33)
    summary += exceptions
summary.append("#" * 79)

trig = []
mutt = []
if st:
    # preprocessing, backup original data for plotting at end
    st.merge(0)
    st.detrend("linear")
    for tr in st:
        tr.data = tr.data * cosTaper(len(tr), 0.01)
    #st.simulate(paz_remove="self", paz_simulate=cornFreq2Paz(1.0), remove_sensitivity=False)
    st.sort()
    st.filter("bandpass", freqmin=PAR.LOW, freqmax=PAR.HIGH, corners=1, zerophase=True)
    st.trim(T1, T2)
    st_trigger = st.copy()
    st.normalize(global_max=False)
    # do the triggering
    trig = coincidenceTrigger("recstalta", PAR.ON, PAR.OFF, st_trigger,
            thr_coincidence_sum=PAR.MIN_STATIONS,
            max_trigger_length=PAR.MAXLEN, trigger_off_extension=PAR.ALLOWANCE,
            details=True, sta=PAR.STA, lta=PAR.LTA)

    for t in trig:
        info = "%s %ss %s %s" % (t['time'].strftime("%Y-%m-%dT%H:%M:%S"), ("%.1f" % t['duration']).rjust(4), ("%i" % t['cft_peak_wmean']).rjust(3), "-".join(t['stations']))
        summary.append(info)
        tmp = st.slice(t['time'] - 1, t['time'] + t['duration'])
        outfilename = "%s/%s_%.1f_%i_%s-%s_%s.png" % (PLOTDIR, t['time'].strftime("%Y-%m-%dT%H:%M:%S"), t['duration'], t['cft_peak_wmean'], len(t['stations']), num_stations, "-".join(t['stations']))
        tmp.plot(outfile=outfilename)
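
`cosTaper` is the legacy (pre-1.0) ObsPy taper helper. The same per-trace preprocessing can be written with the current Stream API, roughly:

st.merge(0)
st.detrend("linear")
# 1 % cosine taper at each trace end, replacing the manual
# tr.data * cosTaper(len(tr), 0.01) loop above
st.taper(max_percentage=0.01, type="cosine")
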
Example #7
    seis = read(seisname, format='PICKLE')
    stTOGETHER += seis.select(channel='BAT')
    stTOGETHER[-1].stats = seis[0].stats
    sdist = seis[0].stats['dist']
    ArrayLocation.append([sdist - center_dist, 0])
    ArrayXY.append([0, sdist - center_dist])
    align_time.append(seis[0].stats.traveltimes['Sdiff']
                      or seis[0].stats.traveltimes['S'])

ArrayLocation = np.array(ArrayLocation)
align_time = np.array(align_time)
T = np.dot(slxy, np.transpose(ArrayXY))

stTOGETHER.write(dir + 'Bundles.PICKLE',
                 'PICKLE')  # Store the raw data before filter
stTOGETHER.filter('bandpass', freqmin=fmin, freqmax=fmax, zerophase=True)
norm = np.max(stTOGETHER[0].data) / norm_constant

# Loop in slowness meshgrid
N = len(stTOGETHER)
power = np.zeros(np.shape(sl))
index = 0
total = np.size(sl)


def POWER(c):
    global index
    index += nproc
    progress(index, total, status='Doing very long job')
    for i, st in enumerate(stTOGETHER):
        w1 = np.argmin(
Example #8
 def plot(self, filter=None, save=False, show=True, ttgrid=None):
     st = Stream()
     willy = seispy.burrow.Groundhog()
     distance = sorted([
         gps2dist_azimuth(self.lat, self.lon, arrival.station.lat,
                          arrival.station.lon)[0]
         for arrival in self.arrivals
     ])
     dmin, dmax = min(distance), max(distance)
     startlag = dmin / 6000. - 7
     endlag = dmax / 2000. + 5
     for arrival in self.arrivals:
         st += willy.fetch(arrival.station.name,
                           arrival.channel.code,
                           starttime=self.time + startlag,
                           endtime=self.time + endlag)
     arrivals = sorted(
         self.arrivals,
         key=lambda arrival:
         (arrival.station.network, arrival.station.name, arrival.channel))
     if filter is not None:
         st.filter(*filter[0], **filter[1])
     st.trim(starttime=self.time + startlag + 2.)
     st.normalize()
     MAX_TRACES = 9
     ncol = int(ceil(len(st) / float(MAX_TRACES))) + 1
     nrow = int(ceil(len(st) / float(ncol - 1)))
     gs = GridSpec(nrow, ncol)
     gs.update(hspace=0, wspace=0)
     width = 1600
     height = width / float(ncol)
     fig = st.plot(size=(width, height), handle=True)
     row, col = 0, 0
     for i in range(len(fig.axes)):
         ax = fig.axes[i]
         arrival = arrivals[i]
         color = "r" if arrival.phase == "P"\
             else "g" if arrival.phase == "S" else "b"
         ax.axvline(arrival.time.toordinal() +
                    arrival.time._get_hours_after_midnight() / 24.,
                    color=color,
                    linewidth=2,
                    alpha=0.75)
         if ttgrid is not None:
             r, theta, phi = sp.geometry.geo2sph(self.lat, self.lon,
                                                 self.depth)
             try:
                 predicted = self.time + ttgrid.get_tt(
                     arrival.station.name, arrival.phase, r, theta, phi)
             except KeyError:
                 continue
             ax.axvline(predicted.toordinal() +
                        predicted._get_hours_after_midnight() / 24.,
                        color=color,
                        linewidth=2,
                        linestyle="--",
                        alpha=0.75)
         if row % nrow == 0:
             col += 1
             row = 0
         position = gs[row, col].get_position(fig)
         ax.set_position(position)
         ax.get_yaxis().set_visible(False)
         row += 1
     for ax in fig.axes[nrow - 1::nrow] + [fig.axes[-1]]:
         ax.set_xticklabels(ax.get_xticklabels(),
                            visible=True,
                            fontsize=10,
                            rotation=-15,
                            horizontalalignment="left")
     gs.update(wspace=0.2)
     postl = gs[0].get_position(fig)
     posbl = gs[ncol * (nrow - 1)].get_position(fig)
     bbox_map = Bbox(((posbl.x0, posbl.y0), (posbl.x1, postl.y1)))
     ax = fig.add_axes(bbox_map)
     self.plot_map(ax=ax)
     fig.suptitle("%s  (ID #%d)" % (self.time, self.evid))
     if save:
         plt.savefig("%s.png" % save, format="png")
     if show:
         plt.show()
     else:
         plt.close()
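
The pick lines above assemble matplotlib's float date values by hand from `toordinal()` and a private `UTCDateTime` helper. Recent ObsPy versions expose this conversion directly, so each vertical line reduces to roughly:

ax.axvline(arrival.time.matplotlib_date, color=color, linewidth=2, alpha=0.75)
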
Example #9
 def test_coincidenceTrigger(self):
     """
     Test network coincidence trigger.
     """
     st = Stream()
     files = ["BW.UH1._.SHZ.D.2010.147.cut.slist.gz",
              "BW.UH2._.SHZ.D.2010.147.cut.slist.gz",
              "BW.UH3._.SHZ.D.2010.147.cut.slist.gz",
              "BW.UH4._.EHZ.D.2010.147.cut.slist.gz"]
     for filename in files:
         filename = os.path.join(self.path, filename)
         st += read(filename)
     # some prefiltering used for UH network
     st.filter('bandpass', freqmin=10, freqmax=20)
     # 1. no weighting, no stations specified, good settings
     # => 3 events, no false triggers
     # for the first test we make some additional tests regarding types
     res = coincidenceTrigger("recstalta", 3.5, 1, st.copy(), 3, sta=0.5,
                              lta=10)
     self.assertTrue(isinstance(res, list))
     self.assertTrue(len(res) == 3)
     expected_keys = ['time', 'coincidence_sum', 'duration', 'stations',
                      'trace_ids']
     expected_types = [UTCDateTime, float, float, list, list]
     for item in res:
         self.assertTrue(isinstance(item, dict))
         for key, _type in zip(expected_keys, expected_types):
             self.assertTrue(key in item)
             self.assertTrue(isinstance(item[key], _type))
     self.assertTrue(res[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
     self.assertTrue(res[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
     self.assertTrue(4.2 < res[0]['duration'] < 4.8)
     self.assertTrue(res[0]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
     self.assertTrue(res[0]['coincidence_sum'] == 4)
     self.assertTrue(res[1]['time'] > UTCDateTime("2010-05-27T16:26:59"))
     self.assertTrue(res[1]['time'] < UTCDateTime("2010-05-27T16:27:03"))
     self.assertTrue(3.2 < res[1]['duration'] < 3.7)
     self.assertTrue(res[1]['stations'] == ['UH2', 'UH3', 'UH1'])
     self.assertTrue(res[1]['coincidence_sum'] == 3)
     self.assertTrue(res[2]['time'] > UTCDateTime("2010-05-27T16:27:27"))
     self.assertTrue(res[2]['time'] < UTCDateTime("2010-05-27T16:27:33"))
     self.assertTrue(4.2 < res[2]['duration'] < 4.4)
     self.assertTrue(res[2]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
     self.assertTrue(res[2]['coincidence_sum'] == 4)
     # 2. no weighting, station selection
     # => 2 events, no false triggers
     trace_ids = ['BW.UH1..SHZ', 'BW.UH3..SHZ', 'BW.UH4..EHZ']
     # ignore UserWarnings
     with warnings.catch_warnings(record=True):
         warnings.simplefilter('ignore', UserWarning)
         re = coincidenceTrigger("recstalta", 3.5, 1, st.copy(), 3,
                                  trace_ids=trace_ids, sta=0.5, lta=10)
         self.assertTrue(len(re) == 2)
         self.assertTrue(re[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
         self.assertTrue(re[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
         self.assertTrue(4.2 < re[0]['duration'] < 4.8)
         self.assertTrue(re[0]['stations'] == ['UH3', 'UH1', 'UH4'])
         self.assertTrue(re[0]['coincidence_sum'] == 3)
         self.assertTrue(re[1]['time'] > UTCDateTime("2010-05-27T16:27:27"))
         self.assertTrue(re[1]['time'] < UTCDateTime("2010-05-27T16:27:33"))
         self.assertTrue(4.2 < re[1]['duration'] < 4.4)
         self.assertTrue(re[1]['stations'] == ['UH3', 'UH1', 'UH4'])
         self.assertTrue(re[1]['coincidence_sum'] == 3)
     # 3. weighting, station selection
     # => 3 events, no false triggers
     trace_ids = {'BW.UH1..SHZ': 0.4, 'BW.UH2..SHZ': 0.35,
                  'BW.UH3..SHZ': 0.4, 'BW.UH4..EHZ': 0.25}
     res = coincidenceTrigger("recstalta", 3.5, 1, st.copy(), 1.0,
                              trace_ids=trace_ids, sta=0.5, lta=10)
     self.assertTrue(len(res) == 3)
     self.assertTrue(res[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
     self.assertTrue(res[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
     self.assertTrue(4.2 < res[0]['duration'] < 4.8)
     self.assertTrue(res[0]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
     self.assertTrue(res[0]['coincidence_sum'] == 1.4)
     self.assertTrue(res[1]['time'] > UTCDateTime("2010-05-27T16:26:59"))
     self.assertTrue(res[1]['time'] < UTCDateTime("2010-05-27T16:27:03"))
     self.assertTrue(3.2 < res[1]['duration'] < 3.7)
     self.assertTrue(res[1]['stations'] == ['UH2', 'UH3', 'UH1'])
     self.assertTrue(res[1]['coincidence_sum'] == 1.15)
     self.assertTrue(res[2]['time'] > UTCDateTime("2010-05-27T16:27:27"))
     self.assertTrue(res[2]['time'] < UTCDateTime("2010-05-27T16:27:33"))
     self.assertTrue(4.2 < res[2]['duration'] < 4.4)
     self.assertTrue(res[2]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
     self.assertTrue(res[2]['coincidence_sum'] == 1.4)
     # 4. weighting, station selection, max_len
     # => 2 events, no false triggers, small event does not overlap anymore
     trace_ids = {'BW.UH1..SHZ': 0.6, 'BW.UH2..SHZ': 0.6}
     # ignore UserWarnings
     with warnings.catch_warnings(record=True):
         warnings.simplefilter('ignore', UserWarning)
         re = coincidenceTrigger("recstalta", 3.5, 1, st.copy(), 1.2,
                                  trace_ids=trace_ids,
                                  max_trigger_length=0.13, sta=0.5, lta=10)
         self.assertTrue(len(re) == 2)
         self.assertTrue(re[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
         self.assertTrue(re[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
         self.assertTrue(0.2 < re[0]['duration'] < 0.3)
         self.assertTrue(re[0]['stations'] == ['UH2', 'UH1'])
         self.assertTrue(re[0]['coincidence_sum'] == 1.2)
         self.assertTrue(re[1]['time'] > UTCDateTime("2010-05-27T16:27:27"))
         self.assertTrue(re[1]['time'] < UTCDateTime("2010-05-27T16:27:33"))
         self.assertTrue(0.18 < re[1]['duration'] < 0.2)
         self.assertTrue(re[1]['stations'] == ['UH2', 'UH1'])
         self.assertTrue(re[1]['coincidence_sum'] == 1.2)
     # 5. station selection, extremely sensitive settings
     # => 4 events, 1 false triggers
     res = coincidenceTrigger("recstalta", 2.5, 1, st.copy(), 2,
                              trace_ids=['BW.UH1..SHZ', 'BW.UH3..SHZ'],
                              sta=0.3, lta=5)
     self.assertTrue(len(res) == 5)
     self.assertTrue(res[3]['time'] > UTCDateTime("2010-05-27T16:27:01"))
     self.assertTrue(res[3]['time'] < UTCDateTime("2010-05-27T16:27:02"))
     self.assertTrue(1.5 < res[3]['duration'] < 1.7)
     self.assertTrue(res[3]['stations'] == ['UH3', 'UH1'])
     self.assertTrue(res[3]['coincidence_sum'] == 2.0)
     # 6. same as 5, gappy stream
     # => same as 5 (almost, duration of 1 event changes by 0.02s)
     st2 = st.copy()
     tr1 = st2.pop(0)
     t1 = tr1.stats.starttime
     t2 = tr1.stats.endtime
     td = t2 - t1
     tr1a = tr1.slice(starttime=t1, endtime=t1 + 0.45 * td)
     tr1b = tr1.slice(starttime=t1 + 0.6 * td, endtime=t1 + 0.94 * td)
     st2.insert(1, tr1a)
     st2.insert(3, tr1b)
     res = coincidenceTrigger("recstalta", 2.5, 1, st2, 2,
                              trace_ids=['BW.UH1..SHZ', 'BW.UH3..SHZ'],
                              sta=0.3, lta=5)
     self.assertTrue(len(res) == 5)
     self.assertTrue(res[3]['time'] > UTCDateTime("2010-05-27T16:27:01"))
     self.assertTrue(res[3]['time'] < UTCDateTime("2010-05-27T16:27:02"))
     self.assertTrue(1.5 < res[3]['duration'] < 1.7)
     self.assertTrue(res[3]['stations'] == ['UH3', 'UH1'])
     self.assertTrue(res[3]['coincidence_sum'] == 2.0)
     # 7. same as 3 but modify input trace ids and check output of trace_ids
     # and other additional information with ``details=True``
     st2 = st.copy()
     st2[0].stats.network = "XX"
     st2[1].stats.location = "99"
     st2[1].stats.network = ""
     st2[1].stats.location = "99"
     st2[1].stats.channel = ""
     st2[2].stats.channel = "EHN"
     st2[3].stats.network = ""
     st2[3].stats.channel = ""
     st2[3].stats.station = ""
     trace_ids = {'XX.UH1..SHZ': 0.4, '.UH2.99.': 0.35,
                  'BW.UH3..EHN': 0.4, '...': 0.25}
     res = coincidenceTrigger("recstalta", 3.5, 1, st2, 1.0,
                              trace_ids=trace_ids, details=True,
                              sta=0.5, lta=10)
     self.assertTrue(len(res) == 3)
     self.assertTrue(res[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
     self.assertTrue(res[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
     self.assertTrue(4.2 < res[0]['duration'] < 4.8)
     self.assertTrue(res[0]['stations'] == ['UH3', 'UH2', 'UH1', ''])
     self.assertTrue(res[0]['trace_ids'][0] == st2[2].id)
     self.assertTrue(res[0]['trace_ids'][1] == st2[1].id)
     self.assertTrue(res[0]['trace_ids'][2] == st2[0].id)
     self.assertTrue(res[0]['trace_ids'][3] == st2[3].id)
     self.assertTrue(res[0]['coincidence_sum'] == 1.4)
     self.assertTrue(res[1]['time'] > UTCDateTime("2010-05-27T16:26:59"))
     self.assertTrue(res[1]['time'] < UTCDateTime("2010-05-27T16:27:03"))
     self.assertTrue(3.2 < res[1]['duration'] < 3.7)
     self.assertTrue(res[1]['stations'] == ['UH2', 'UH3', 'UH1'])
     self.assertTrue(res[1]['trace_ids'][0] == st2[1].id)
     self.assertTrue(res[1]['trace_ids'][1] == st2[2].id)
     self.assertTrue(res[1]['trace_ids'][2] == st2[0].id)
     self.assertTrue(res[1]['coincidence_sum'] == 1.15)
     self.assertTrue(res[2]['time'] > UTCDateTime("2010-05-27T16:27:27"))
     self.assertTrue(res[2]['time'] < UTCDateTime("2010-05-27T16:27:33"))
     self.assertTrue(4.2 < res[2]['duration'] < 4.4)
     self.assertTrue(res[2]['stations'] == ['UH3', 'UH2', 'UH1', ''])
     self.assertTrue(res[2]['trace_ids'][0] == st2[2].id)
     self.assertTrue(res[2]['trace_ids'][1] == st2[1].id)
     self.assertTrue(res[2]['trace_ids'][2] == st2[0].id)
     self.assertTrue(res[2]['trace_ids'][3] == st2[3].id)
     self.assertTrue(res[2]['coincidence_sum'] == 1.4)
     expected_keys = ['cft_peak_wmean', 'cft_std_wmean', 'cft_peaks',
                      'cft_stds']
     expected_types = [float, float, list, list]
     for item in res:
         for key, _type in zip(expected_keys, expected_types):
             self.assertTrue(key in item)
             self.assertTrue(isinstance(item[key], _type))
     # check some of the detailed info
     ev = res[-1]
      self.assertAlmostEqual(ev['cft_peak_wmean'], 18.097582068353855)
      self.assertAlmostEqual(ev['cft_std_wmean'], 4.7972436395074087)
      self.assertAlmostEqual(ev['cft_peaks'][0], 18.973097608513633)
      self.assertAlmostEqual(ev['cft_peaks'][1], 16.852175794415011)
      self.assertAlmostEqual(ev['cft_peaks'][2], 18.64005853900883)
      self.assertAlmostEqual(ev['cft_peaks'][3], 17.572363634564621)
      self.assertAlmostEqual(ev['cft_stds'][0], 4.8811165222946951)
      self.assertAlmostEqual(ev['cft_stds'][1], 4.4446373508521804)
      self.assertAlmostEqual(ev['cft_stds'][2], 5.3499401252675964)
      self.assertAlmostEqual(ev['cft_stds'][3], 4.2723814539487703)
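
This test targets the pre-1.0 camelCase API. Since ObsPy 1.0 the same function is exposed in snake_case, so the first call would read:

from obspy.signal.trigger import coincidence_trigger

res = coincidence_trigger("recstalta", 3.5, 1, st.copy(), 3, sta=0.5, lta=10)
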
Example #10
def main():

    # Run Input Parser
    (opts, indb) = options.get_harmonics_options()

    # Load Database
    db = stdb.io.load_db(fname=indb)

    # Construct station key loop
    allkeys = sorted(db.keys())

    # Extract key subset
    if len(opts.stkeys) > 0:
        stkeys = []
        for skey in opts.stkeys:
            stkeys.extend([s for s in allkeys if skey in s])
    else:
        stkeys = sorted(db.keys())

    # Loop over station keys
    for stkey in list(stkeys):

        # Extract station information from dictionary
        sta = db[stkey]

        # Define path to see if it exists
        datapath = 'DATA/' + stkey
        if not os.path.isdir(datapath):
            raise Exception('Path to ' + datapath +
                            " doesn't exist - aborting")

        # Get search start time
        if opts.startT is None:
            tstart = sta.startdate
        else:
            tstart = opts.startT

        # Get search end time
        if opts.endT is None:
            tend = sta.enddate
        else:
            tend = opts.endT

        if tstart > sta.enddate or tend < sta.startdate:
            continue

        # Temporary print locations
        tlocs = sta.location
        if len(tlocs) == 0:
            tlocs = ['']
        for il in range(0, len(tlocs)):
            if len(tlocs[il]) == 0:
                tlocs[il] = "--"
        sta.location = tlocs

        # Update Display
        print(" ")
        print(" ")
        print("|===============================================|")
        print("|===============================================|")
        print("|                   {0:>8s}                    |".format(
            sta.station))
        print("|===============================================|")
        print("|===============================================|")
        print("|  Station: {0:>2s}.{1:5s}                            |".format(
            sta.network, sta.station))
        print("|      Channel: {0:2s}; Locations: {1:15s}  |".format(
            sta.channel, ",".join(tlocs)))
        print("|      Lon: {0:7.2f}; Lat: {1:6.2f}                |".format(
            sta.longitude, sta.latitude))
        print("|      Start time: {0:19s}          |".format(
            sta.startdate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|      End time:   {0:19s}          |".format(
            sta.enddate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|-----------------------------------------------|")

        rfRstream = Stream()
        rfTstream = Stream()

        for folder in os.listdir(datapath):

            date = folder.split('_')[0]
            year = date[0:4]
            month = date[4:6]
            day = date[6:8]
            dateUTC = UTCDateTime(year + '-' + month + '-' + day)

            if dateUTC > tstart and dateUTC < tend:

                file = open(datapath + "/" + folder + "/RF_Data.pkl", "rb")
                rfdata = pickle.load(file)
                rfRstream.append(rfdata[1])  # radial component (as in Example #5)
                rfTstream.append(rfdata[2])  # transverse component
                file.close()

            else:
                continue

        # plotting.wiggle(rfRstream, sort='baz')

        # Try binning if specified
        if opts.nbin is not None:
            rf_tmp = binning.bin(rfRstream,
                                 rfTstream,
                                 typ='baz',
                                 nbin=opts.nbin + 1)
            rfRstream = rf_tmp[0]
            rfTstream = rf_tmp[1]

        # Filter original streams
        rfRstream.filter('bandpass',
                         freqmin=opts.freqs[0],
                         freqmax=opts.freqs[1],
                         corners=2,
                         zerophase=True)
        rfTstream.filter('bandpass',
                         freqmin=opts.freqs[0],
                         freqmax=opts.freqs[1],
                         corners=2,
                         zerophase=True)

        # Initialize the HkStack object
        harmonics = Harmonics(rfRstream, rfTstream)

        # Stack with or without dip
        if opts.find_azim:
            harmonics.dcomp_find_azim(xmin=opts.trange[0], xmax=opts.trange[1])
            print("Optimal azimuth for trange between "+\
                str(opts.trange[0])+" and "+str(opts.trange[1])+\
                    "is: "+str(harmonics.azim))
        else:
            harmonics.dcomp_fix_azim(azim=opts.azim)

        if opts.plot:
            harmonics.plot(opts.ymax, opts.scale, opts.save_plot, opts.title,
                           opts.form)

        if opts.save:
            filename = datapath + "/" + hkstack.hstream[0].stats.station + \
                ".harmonics.pkl"
            harmonics.save()
Example #11
    # do the triggering
    trigger_list = []
    for tr in st_trigger:
        tr.stats.channel = "recstalta"
        max_len = PAR.MAXLEN * tr.stats.sampling_rate
        trigger_sample_list = triggerOnset(tr.data, PAR.ON, PAR.OFF, max_len=max_len)
        for on, off in trigger_sample_list:
            begin = tr.stats.starttime + float(on) / tr.stats.sampling_rate
            end = tr.stats.starttime + float(off) / tr.stats.sampling_rate
            trigger_list.append((begin.timestamp, end.timestamp, tr.stats.station))
    trigger_list.sort()

    # merge waveform and trigger stream for plotting
    # the normalizations are done because the triggers have a completely different
    # scale and would not be visible in the plot otherwise...
    st.filter("bandpass", freqmin=1.0, freqmax=20.0, corners=1, zerophase=True)
    st.normalize(global_max=False)
    st_trigger.normalize(global_max=True)
    st.extend(st_trigger)

    # coincidence part, work through sorted trigger list...
    mutt = ["mutt", "-s", "UH Alert  %s -- %s" % (T1, T2)]
    while len(trigger_list) > 1:
        on, off, sta = trigger_list[0]
        stations = set()
        stations.add(sta)
        for i in range(1, len(trigger_list)):
            tmp_on, tmp_off, tmp_sta = trigger_list[i]
            if tmp_on < off + PAR.ALLOWANCE:
                stations.add(tmp_sta)
                # allow sets of triggers that overlap only on subsets of all
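
The snippet is cut off above. For orientation only, a hypothetical sketch of the coincidence step it is building (not the original continuation): collect the stations whose trigger on-times fall within `PAR.ALLOWANCE` seconds of the current trigger window:

def coincident_stations(trigger_list, allowance):
    """Stations whose triggers overlap the first trigger's window."""
    on, off, sta = trigger_list[0]
    stations = {sta}
    for tmp_on, tmp_off, tmp_sta in trigger_list[1:]:
        if tmp_on < off + allowance:
            stations.add(tmp_sta)
            off = max(off, tmp_off)  # extend the coincidence window
        else:
            break  # the list is sorted by on-time, so no later overlaps
    return stations
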
Example #12
 def test_coincidenceTrigger(self):
     """
     Test network coincidence trigger.
     """
     st = Stream()
     files = [
         "BW.UH1._.SHZ.D.2010.147.cut.slist.gz",
         "BW.UH2._.SHZ.D.2010.147.cut.slist.gz",
         "BW.UH3._.SHZ.D.2010.147.cut.slist.gz",
         "BW.UH4._.EHZ.D.2010.147.cut.slist.gz"
     ]
     for filename in files:
         filename = os.path.join(self.path, filename)
         st += read(filename)
     # some prefiltering used for UH network
     st.filter('bandpass', freqmin=10, freqmax=20)
     # 1. no weighting, no stations specified, good settings
     # => 3 events, no false triggers
     # for the first test we make some additional tests regarding types
     res = coincidenceTrigger("recstalta",
                              3.5,
                              1,
                              st.copy(),
                              3,
                              sta=0.5,
                              lta=10)
     self.assertTrue(isinstance(res, list))
     self.assertTrue(len(res) == 3)
     expected_keys = [
         'time', 'coincidence_sum', 'duration', 'stations', 'trace_ids'
     ]
     expected_types = [UTCDateTime, float, float, list, list]
     for item in res:
         self.assertTrue(isinstance(item, dict))
         for key, _type in zip(expected_keys, expected_types):
             self.assertTrue(key in item)
             self.assertTrue(isinstance(item[key], _type))
     self.assertTrue(res[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
     self.assertTrue(res[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
     self.assertTrue(4.2 < res[0]['duration'] < 4.8)
     self.assertTrue(res[0]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
     self.assertTrue(res[0]['coincidence_sum'] == 4)
     self.assertTrue(res[1]['time'] > UTCDateTime("2010-05-27T16:26:59"))
     self.assertTrue(res[1]['time'] < UTCDateTime("2010-05-27T16:27:03"))
     self.assertTrue(3.2 < res[1]['duration'] < 3.7)
     self.assertTrue(res[1]['stations'] == ['UH2', 'UH3', 'UH1'])
     self.assertTrue(res[1]['coincidence_sum'] == 3)
     self.assertTrue(res[2]['time'] > UTCDateTime("2010-05-27T16:27:27"))
     self.assertTrue(res[2]['time'] < UTCDateTime("2010-05-27T16:27:33"))
     self.assertTrue(4.2 < res[2]['duration'] < 4.4)
     self.assertTrue(res[2]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
     self.assertTrue(res[2]['coincidence_sum'] == 4)
     # 2. no weighting, station selection
     # => 2 events, no false triggers
     trace_ids = ['BW.UH1..SHZ', 'BW.UH3..SHZ', 'BW.UH4..EHZ']
     # ignore UserWarnings
     with warnings.catch_warnings(record=True):
         warnings.simplefilter('ignore', UserWarning)
         re = coincidenceTrigger("recstalta",
                                 3.5,
                                 1,
                                 st.copy(),
                                 3,
                                 trace_ids=trace_ids,
                                 sta=0.5,
                                 lta=10)
         self.assertTrue(len(re) == 2)
         self.assertTrue(re[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
         self.assertTrue(re[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
         self.assertTrue(4.2 < re[0]['duration'] < 4.8)
         self.assertTrue(re[0]['stations'] == ['UH3', 'UH1', 'UH4'])
         self.assertTrue(re[0]['coincidence_sum'] == 3)
         self.assertTrue(re[1]['time'] > UTCDateTime("2010-05-27T16:27:27"))
         self.assertTrue(re[1]['time'] < UTCDateTime("2010-05-27T16:27:33"))
         self.assertTrue(4.2 < re[1]['duration'] < 4.4)
         self.assertTrue(re[1]['stations'] == ['UH3', 'UH1', 'UH4'])
         self.assertTrue(re[1]['coincidence_sum'] == 3)
     # 3. weighting, station selection
     # => 3 events, no false triggers
     trace_ids = {
         'BW.UH1..SHZ': 0.4,
         'BW.UH2..SHZ': 0.35,
         'BW.UH3..SHZ': 0.4,
         'BW.UH4..EHZ': 0.25
     }
     res = coincidenceTrigger("recstalta",
                              3.5,
                              1,
                              st.copy(),
                              1.0,
                              trace_ids=trace_ids,
                              sta=0.5,
                              lta=10)
     self.assertTrue(len(res) == 3)
     self.assertTrue(res[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
     self.assertTrue(res[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
     self.assertTrue(4.2 < res[0]['duration'] < 4.8)
     self.assertTrue(res[0]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
     self.assertTrue(res[0]['coincidence_sum'] == 1.4)
     self.assertTrue(res[1]['time'] > UTCDateTime("2010-05-27T16:26:59"))
     self.assertTrue(res[1]['time'] < UTCDateTime("2010-05-27T16:27:03"))
     self.assertTrue(3.2 < res[1]['duration'] < 3.7)
     self.assertTrue(res[1]['stations'] == ['UH2', 'UH3', 'UH1'])
     self.assertTrue(res[1]['coincidence_sum'] == 1.15)
     self.assertTrue(res[2]['time'] > UTCDateTime("2010-05-27T16:27:27"))
     self.assertTrue(res[2]['time'] < UTCDateTime("2010-05-27T16:27:33"))
     self.assertTrue(4.2 < res[2]['duration'] < 4.4)
     self.assertTrue(res[2]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
     self.assertTrue(res[2]['coincidence_sum'] == 1.4)
     # 4. weighting, station selection, max_len
     # => 2 events, no false triggers, small event does not overlap anymore
     trace_ids = {'BW.UH1..SHZ': 0.6, 'BW.UH2..SHZ': 0.6}
     # ignore UserWarnings
     with warnings.catch_warnings(record=True):
         warnings.simplefilter('ignore', UserWarning)
         re = coincidenceTrigger("recstalta",
                                 3.5,
                                 1,
                                 st.copy(),
                                 1.2,
                                 trace_ids=trace_ids,
                                 max_trigger_length=0.13,
                                 sta=0.5,
                                 lta=10)
         self.assertTrue(len(re) == 2)
         self.assertTrue(re[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
         self.assertTrue(re[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
         self.assertTrue(0.2 < re[0]['duration'] < 0.3)
         self.assertTrue(re[0]['stations'] == ['UH2', 'UH1'])
         self.assertTrue(re[0]['coincidence_sum'] == 1.2)
         self.assertTrue(re[1]['time'] > UTCDateTime("2010-05-27T16:27:27"))
         self.assertTrue(re[1]['time'] < UTCDateTime("2010-05-27T16:27:33"))
         self.assertTrue(0.18 < re[1]['duration'] < 0.2)
         self.assertTrue(re[1]['stations'] == ['UH2', 'UH1'])
         self.assertTrue(re[1]['coincidence_sum'] == 1.2)
     # 5. station selection, extremely sensitive settings
     # => 4 events, 1 false triggers
     res = coincidenceTrigger("recstalta",
                              2.5,
                              1,
                              st.copy(),
                              2,
                              trace_ids=['BW.UH1..SHZ', 'BW.UH3..SHZ'],
                              sta=0.3,
                              lta=5)
     self.assertTrue(len(res) == 5)
     self.assertTrue(res[3]['time'] > UTCDateTime("2010-05-27T16:27:01"))
     self.assertTrue(res[3]['time'] < UTCDateTime("2010-05-27T16:27:02"))
     self.assertTrue(1.5 < res[3]['duration'] < 1.7)
     self.assertTrue(res[3]['stations'] == ['UH3', 'UH1'])
     self.assertTrue(res[3]['coincidence_sum'] == 2.0)
     # 6. same as 5, gappy stream
     # => same as 5 (almost, duration of 1 event changes by 0.02s)
     st2 = st.copy()
     tr1 = st2.pop(0)
     t1 = tr1.stats.starttime
     t2 = tr1.stats.endtime
     td = t2 - t1
     tr1a = tr1.slice(starttime=t1, endtime=t1 + 0.45 * td)
     tr1b = tr1.slice(starttime=t1 + 0.6 * td, endtime=t1 + 0.94 * td)
     st2.insert(1, tr1a)
     st2.insert(3, tr1b)
     res = coincidenceTrigger("recstalta",
                              2.5,
                              1,
                              st2,
                              2,
                              trace_ids=['BW.UH1..SHZ', 'BW.UH3..SHZ'],
                              sta=0.3,
                              lta=5)
     self.assertTrue(len(res) == 5)
     self.assertTrue(res[3]['time'] > UTCDateTime("2010-05-27T16:27:01"))
     self.assertTrue(res[3]['time'] < UTCDateTime("2010-05-27T16:27:02"))
     self.assertTrue(1.5 < res[3]['duration'] < 1.7)
     self.assertTrue(res[3]['stations'] == ['UH3', 'UH1'])
     self.assertTrue(res[3]['coincidence_sum'] == 2.0)
     # 7. same as 3 but modify input trace ids and check output of trace_ids
     # and other additional information with ``details=True``
     st2 = st.copy()
     st2[0].stats.network = "XX"
     st2[1].stats.location = "99"
     st2[1].stats.network = ""
     st2[1].stats.location = "99"
     st2[1].stats.channel = ""
     st2[2].stats.channel = "EHN"
     st2[3].stats.network = ""
     st2[3].stats.channel = ""
     st2[3].stats.station = ""
     trace_ids = {
         'XX.UH1..SHZ': 0.4,
         '.UH2.99.': 0.35,
         'BW.UH3..EHN': 0.4,
         '...': 0.25
     }
     res = coincidenceTrigger("recstalta",
                              3.5,
                              1,
                              st2,
                              1.0,
                              trace_ids=trace_ids,
                              details=True,
                              sta=0.5,
                              lta=10)
     self.assertTrue(len(res) == 3)
     self.assertTrue(res[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
     self.assertTrue(res[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
     self.assertTrue(4.2 < res[0]['duration'] < 4.8)
     self.assertTrue(res[0]['stations'] == ['UH3', 'UH2', 'UH1', ''])
     self.assertTrue(res[0]['trace_ids'][0] == st2[2].id)
     self.assertTrue(res[0]['trace_ids'][1] == st2[1].id)
     self.assertTrue(res[0]['trace_ids'][2] == st2[0].id)
     self.assertTrue(res[0]['trace_ids'][3] == st2[3].id)
     self.assertTrue(res[0]['coincidence_sum'] == 1.4)
     self.assertTrue(res[1]['time'] > UTCDateTime("2010-05-27T16:26:59"))
     self.assertTrue(res[1]['time'] < UTCDateTime("2010-05-27T16:27:03"))
     self.assertTrue(3.2 < res[1]['duration'] < 3.7)
     self.assertTrue(res[1]['stations'] == ['UH2', 'UH3', 'UH1'])
     self.assertTrue(res[1]['trace_ids'][0] == st2[1].id)
     self.assertTrue(res[1]['trace_ids'][1] == st2[2].id)
     self.assertTrue(res[1]['trace_ids'][2] == st2[0].id)
     self.assertTrue(res[1]['coincidence_sum'] == 1.15)
     self.assertTrue(res[2]['time'] > UTCDateTime("2010-05-27T16:27:27"))
     self.assertTrue(res[2]['time'] < UTCDateTime("2010-05-27T16:27:33"))
     self.assertTrue(4.2 < res[2]['duration'] < 4.4)
     self.assertTrue(res[2]['stations'] == ['UH3', 'UH2', 'UH1', ''])
     self.assertTrue(res[2]['trace_ids'][0] == st2[2].id)
     self.assertTrue(res[2]['trace_ids'][1] == st2[1].id)
     self.assertTrue(res[2]['trace_ids'][2] == st2[0].id)
     self.assertTrue(res[2]['trace_ids'][3] == st2[3].id)
     self.assertTrue(res[2]['coincidence_sum'] == 1.4)
     expected_keys = [
         'cft_peak_wmean', 'cft_std_wmean', 'cft_peaks', 'cft_stds'
     ]
     expected_types = [float, float, list, list]
     for item in res:
         for key, _type in zip(expected_keys, expected_types):
             self.assertTrue(key in item)
             self.assertTrue(isinstance(item[key], _type))
     # check some of the detailed info
     ev = res[-1]
      self.assertAlmostEqual(ev['cft_peak_wmean'], 18.097582068353855)
      self.assertAlmostEqual(ev['cft_std_wmean'], 4.7972436395074087)
      self.assertAlmostEqual(ev['cft_peaks'][0], 18.973097608513633)
      self.assertAlmostEqual(ev['cft_peaks'][1], 16.852175794415011)
      self.assertAlmostEqual(ev['cft_peaks'][2], 18.64005853900883)
      self.assertAlmostEqual(ev['cft_peaks'][3], 17.572363634564621)
      self.assertAlmostEqual(ev['cft_stds'][0], 4.8811165222946951)
      self.assertAlmostEqual(ev['cft_stds'][1], 4.4446373508521804)
      self.assertAlmostEqual(ev['cft_stds'][2], 5.3499401252675964)
      self.assertAlmostEqual(ev['cft_stds'][3], 4.2723814539487703)
Example #13
def main():

    # Run Input Parser
    (opts, indb) = options.get_hk_options()

    # Load Database
    db = stdb.io.load_db(fname=indb)

    # Construct station key loop
    allkeys = sorted(db.keys())

    # Extract key subset
    if len(opts.stkeys) > 0:
        stkeys = []
        for skey in opts.stkeys:
            stkeys.extend([s for s in allkeys if skey in s])
    else:
        stkeys = sorted(db.keys())

    # Loop over station keys
    for stkey in list(stkeys):

        # Extract station information from dictionary
        sta = db[stkey]

        # Define path to see if it exists
        datapath = 'DATA/' + stkey
        if not os.path.isdir(datapath):
            raise Exception('Path to ' + datapath +
                            " doesn't exist - aborting")

        # Get search start time
        if opts.startT is None:
            tstart = sta.startdate
        else:
            tstart = opts.startT

        # Get search end time
        if opts.endT is None:
            tend = sta.enddate
        else:
            tend = opts.endT

        if tstart > sta.enddate or tend < sta.startdate:
            continue

        # Temporary print locations
        tlocs = sta.location
        if len(tlocs) == 0:
            tlocs = ['']
        for il in range(0, len(tlocs)):
            if len(tlocs[il]) == 0:
                tlocs[il] = "--"
        sta.location = tlocs

        # Update Display
        print(" ")
        print(" ")
        print("|===============================================|")
        print("|===============================================|")
        print("|                   {0:>8s}                    |".format(
            sta.station))
        print("|===============================================|")
        print("|===============================================|")
        print("|  Station: {0:>2s}.{1:5s}                            |".format(
            sta.network, sta.station))
        print("|      Channel: {0:2s}; Locations: {1:15s}  |".format(
            sta.channel, ",".join(tlocs)))
        print("|      Lon: {0:7.2f}; Lat: {1:6.2f}                |".format(
            sta.longitude, sta.latitude))
        print("|      Start time: {0:19s}          |".format(
            sta.startdate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|      End time:   {0:19s}          |".format(
            sta.enddate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|-----------------------------------------------|")

        rfRstream = Stream()

        for folder in os.listdir(datapath):

            date = folder.split('_')[0]
            year = date[0:4]
            month = date[4:6]
            day = date[6:8]
            dateUTC = UTCDateTime(year + '-' + month + '-' + day)

            if tstart < dateUTC < tend:
                with open(datapath + "/" + folder + "/RF_Data.pkl",
                          "rb") as f:
                    rfdata = pickle.load(f)
                rfRstream.append(rfdata)

        # plotting.wiggle(rfRstream, sort='baz')

        # Try binning if specified
        if opts.nbin is not None:
            rf_tmp = binning.bin(rfRstream, typ='slow', nbin=opts.nbin + 1)
            rfRstream = rf_tmp[0]

        # Get a copy of the radial component and filter it in a second band
        rfRstream_copy = None
        if opts.copy:
            rfRstream_copy = rfRstream.copy()
            rfRstream_copy.filter('bandpass',
                                  freqmin=opts.freqs_copy[0],
                                  freqmax=opts.freqs_copy[1],
                                  corners=2,
                                  zerophase=True)

        # Filter original stream
        rfRstream.filter('bandpass',
                         freqmin=opts.freqs[0],
                         freqmax=opts.freqs[1],
                         corners=2,
                         zerophase=True)

        # Initialize the HkStack object; pass the second, differently
        # filtered copy only when one was requested
        if opts.copy:
            hkstack = HkStack(rfRstream,
                              rfV2=rfRstream_copy,
                              strike=opts.strike,
                              dip=opts.dip,
                              vp=opts.vp)
        else:
            hkstack = HkStack(rfRstream,
                              strike=opts.strike,
                              dip=opts.dip,
                              vp=opts.vp)

        # Update attributes
        hkstack.hbound = opts.hbound
        hkstack.kbound = opts.kbound
        hkstack.dh = opts.dh
        hkstack.dk = opts.dk
        hkstack.weights = opts.weights

        # Stack with or without dip
        if opts.calc_dip:
            hkstack.stack_dip()
        else:
            hkstack.stack()

        # Average stacks
        hkstack.average(typ=opts.typ)

        if opts.plot:
            hkstack.plot(opts.save_plot, opts.title, opts.form)

        if opts.save:
            filename = datapath + "/" + hkstack.hstream[0].stats.station + \
                ".hkstack.pkl"
            hkstack.save(file=filename)
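
A saved stack can be reloaded for later inspection. A minimal sketch, assuming a hypothetical station key NET.STA and that the pickled object still exposes the attributes set above (hbound, kbound, dh, dk, weights):

import pickle

# hypothetical path, following the datapath/station naming used above
with open("DATA/NET.STA/STA.hkstack.pkl", "rb") as f:
    hkstack = pickle.load(f)
print(hkstack.hbound, hkstack.kbound, hkstack.weights)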
Example #14
        max_len = PAR.MAXLEN * tr.stats.sampling_rate
        trigger_sample_list = triggerOnset(tr.data,
                                           PAR.ON,
                                           PAR.OFF,
                                           max_len=max_len)
        for on, off in trigger_sample_list:
            begin = tr.stats.starttime + float(on) / tr.stats.sampling_rate
            end = tr.stats.starttime + float(off) / tr.stats.sampling_rate
            trigger_list.append(
                (begin.timestamp, end.timestamp, tr.stats.station))
    trigger_list.sort()

    # merge waveform and trigger streams for plotting; normalize because the
    # triggers are on a completely different scale and would otherwise be
    # invisible in the plot
    st.filter("bandpass", freqmin=1.0, freqmax=20.0, corners=1, zerophase=True)
    st.normalize(global_max=False)
    st_trigger.normalize(global_max=True)
    st.extend(st_trigger)

    # coincidence part, work through sorted trigger list...
    mutt = ["mutt", "-s", "UH Alert  %s -- %s" % (T1, T2)]
    while len(trigger_list) > 1:
        on, off, sta = trigger_list[0]
        stations = set()
        stations.add(sta)
        for i in range(1, len(trigger_list)):
            tmp_on, tmp_off, tmp_sta = trigger_list[i]
            if tmp_on < off + PAR.ALLOWANCE:
                stations.add(tmp_sta)
                # allow sets of triggers that overlap only on subsets of all
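
For reference, the (on, off) sample pairs consumed above come from thresholding a characteristic function. A minimal, self-contained sketch with purely illustrative STA/LTA lengths and thresholds, using the modern snake_case names of the same ObsPy routines:

from obspy import read
from obspy.signal.trigger import recursive_sta_lta, trigger_onset

tr = read()[0]  # ObsPy's bundled example trace
df = tr.stats.sampling_rate
cft = recursive_sta_lta(tr.data, int(0.5 * df), int(10 * df))
for on, off in trigger_onset(cft, 3.5, 1.0):
    # convert sample indices to absolute times, as done above
    print(tr.stats.starttime + on / df, tr.stats.starttime + off / df)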
Example #15
if exceptions:
    summary.append("#" * 33 + " Exceptions  " + "#" * 33)
    summary += exceptions
summary.append("#" * 79)

trig = []
mutt = []
if st:
    # preprocessing, backup original data for plotting at end
    st.merge(0)
    st.detrend("linear")
    for tr in st:
        tr.data = tr.data * cosTaper(len(tr), 0.01)
    #st.simulate(paz_remove="self", paz_simulate=cornFreq2Paz(1.0), remove_sensitivity=False)
    st.sort()
    st.filter("bandpass", freqmin=PAR.LOW, freqmax=PAR.HIGH, corners=1, zerophase=True)
    st.trim(T1, T2)
    st_trigger = st.copy()
    st.normalize(global_max=False)
    # do the triggering
    trig = coincidenceTrigger("recstalta", PAR.ON, PAR.OFF, st_trigger,
            thr_coincidence_sum=PAR.MIN_STATIONS,
            max_trigger_length=PAR.MAXLEN, trigger_off_extension=PAR.ALLOWANCE,
            details=True, sta=PAR.STA, lta=PAR.LTA)

    for t in trig:
        info = "%s %ss %s %s" % (
            t['time'].strftime("%Y-%m-%dT%H:%M:%S"),
            ("%.1f" % t['duration']).rjust(4),
            ("%i" % t['cft_peak_wmean']).rjust(3),
            "-".join(t['stations']))
        summary.append(info)
        tmp = st.slice(t['time'] - 1, t['time'] + t['duration'])
        outfilename = "%s/%s_%.1f_%i_%s-%s_%s.png" % (
            PLOTDIR, t['time'].strftime("%Y-%m-%dT%H:%M:%S"),
            t['duration'], t['cft_peak_wmean'],
            len(t['stations']), num_stations, "-".join(t['stations']))
        tmp.plot(outfile=outfilename)
Example #16
t2 = t + 4 * 3600

stations = ["AIGLE", "SENIN", "DIX", "LAUCH", "MMK", "SIMPL"]
st = Stream()

for station in stations:
    try:
        tmp = client.getWaveform("CH", station, "", "[EH]HZ", t, t2,
                                 metadata=True)
    except Exception:
        print(station, "---")
        continue
    st += tmp

st.taper()
st.filter("bandpass", freqmin=1, freqmax=20)
triglist = coincidenceTrigger("recstalta", 10, 2, st, 4, sta=0.5, lta=10)
print(len(triglist), "events triggered.")

for trig in triglist:
    closest_sta = trig['stations'][0]
    tr = st.select(station=closest_sta)[0]
    trig['latitude'] = tr.stats.coordinates.latitude
    trig['longitude'] = tr.stats.coordinates.longitude

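# Wood-Anderson torsion seismometer response (static magnification 2800),
# used to convert traces for local-magnitude estimation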
paz_wa = {'sensitivity': 2800, 'zeros': [0j], 'gain': 1,
          'poles': [-6.2832 - 4.7124j, -6.2832 + 4.7124j]}

for trig in triglist:
    t = trig['time']
    print("#" * 80)
Example #17
#st += client.get_waveforms(net, sta, "00","LH*", stime, etime)
#st += client.get_waveforms(net, sta, "30","LDO", stime, etime)
st.detrend('constant')
st.merge(fill_value=0)
st.decimate(5)
st.decimate(2)
st.decimate(6)
st.decimate(6)
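# cumulative decimation factor: 5 * 2 * 6 * 6 = 360, so nominal 1 sps LH*
# data ends up with one sample every 360 s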
# Attach the response metadata needed to remove the instrument response below
st.attach_response(inv)

# With both data and metadata in hand, rotate to ZNE and prepare the
# particle-motion analysis

st.rotate(method="->ZNE", inventory=inv)
st.select(channel="LH*").remove_response(output='ACC')
st.filter('bandpass', freqmin=0.03 / 1000., freqmax=0.04 / 1000.)
st.taper(0.05)
if debug:
    print(st)

fig = plt.figure(1, figsize=(12, 12))
plt.subplots_adjust(hspace=0.001)
for idx, chan in enumerate(['LHZ', 'LHN', 'LHE']):
    stT = st.select(channel=chan)
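    # time axis in days: one sample every 360 s after the decimation above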
    t = np.arange(len(stT[0].data)) * 360. / (24. * 60. * 60.)
    ax1 = plt.subplot(3, 1, idx + 1)
    for tr in stT:
        ax1.plot(t, tr.data * 10**9, label=tr.stats.location)
    ax1.text(1., .9 * max(tr.data * 10**9), chan)
    ax2 = ax1.twinx()
    ax2.plot(t,