def main():
    """Harmonic decomposition of receiver functions for a set of stations.

    Loads an StDb station database, gathers quality-controlled receiver
    functions (SNR/CC thresholds) for each station key, optionally removes
    variance outliers and bins by back-azimuth, then computes the harmonic
    decomposition and optionally plots and/or saves the result.
    """

    print()
    print(
        "################################################################################"
    )
    print(
        "# __ _ _ #"
    )
    print(
        "# _ __ / _|_ __ _ _ | |__ __ _ _ __ _ __ ___ ___ _ __ (_) ___ ___ #"
    )
    print(
        "# | '__| |_| '_ \| | | | | '_ \ / _` | '__| '_ ` _ \ / _ \| '_ \| |/ __/ __| #"
    )
    print(
        "# | | | _| |_) | |_| | | | | | (_| | | | | | | | | (_) | | | | | (__\__ \ #"
    )
    print(
        "# |_| |_| | .__/ \__, |___|_| |_|\__,_|_| |_| |_| |_|\___/|_| |_|_|\___|___/ #"
    )
    print(
        "# |_| |___/_____| #"
    )
    print(
        "# #"
    )
    print(
        "################################################################################"
    )
    print()

    # Run Input Parser
    args = get_harmonics_arguments()

    # Load Database
    db, stkeys = stdb.io.load_db(fname=args.indb, keys=args.stkeys)

    # Track processed folders so a shared station folder is only handled once
    procfold = []

    # Loop over station keys
    for stkey in list(stkeys):

        # Extract station information from dictionary
        sta = db[stkey]

        # Construct Folder Name (drop location code unless --lkey was given)
        stfld = stkey
        if not args.lkey:
            stfld = stkey.split('.')[0] + "." + stkey.split('.')[1]

        # Define path to see if it exists
        if args.phase in ['P', 'PP', 'allP']:
            datapath = Path('P_DATA') / stfld
        elif args.phase in ['S', 'SKS', 'allS']:
            datapath = Path('S_DATA') / stfld
        if not datapath.is_dir():
            print('Path to ' + str(datapath) + ' doesn`t exist - continuing')
            continue

        # Get search start time
        if args.startT is None:
            tstart = sta.startdate
        else:
            tstart = args.startT

        # Get search end time
        if args.endT is None:
            tend = sta.enddate
        else:
            tend = args.endT

        # Skip stations whose deployment does not overlap the search window
        if tstart > sta.enddate or tend < sta.startdate:
            continue

        # Temporary print locations
        tlocs = sta.location
        if len(tlocs) == 0:
            tlocs = ['']
        for il in range(0, len(tlocs)):
            if len(tlocs[il]) == 0:
                tlocs[il] = "--"
        sta.location = tlocs

        # Update Display
        print(" ")
        print(" ")
        print("|===============================================|")
        print("|===============================================|")
        print("| {0:>8s} |".format(
            sta.station))
        print("|===============================================|")
        print("|===============================================|")
        print("| Station: {0:>2s}.{1:5s} |".format(
            sta.network, sta.station))
        print("| Channel: {0:2s}; Locations: {1:15s} |".format(
            sta.channel, ",".join(tlocs)))
        print("| Lon: {0:7.2f}; Lat: {1:6.2f} |".format(
            sta.longitude, sta.latitude))
        print("| Start time: {0:19s} |".format(
            sta.startdate.strftime("%Y-%m-%d %H:%M:%S")))
        print("| End time: {0:19s} |".format(
            sta.enddate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|-----------------------------------------------|")

        # Check for folder already processed
        if stfld in procfold:
            print(' {0} already processed...skipping '.format(stfld))
            continue

        rfRstream = Stream()
        rfTstream = Stream()

        datafiles = [x for x in datapath.iterdir() if x.is_dir()]
        for folder in datafiles:

            # Skip hidden folders
            if folder.name.startswith('.'):
                continue

            # Event folders are named YYYYMMDD_* — parse the date prefix
            date = folder.name.split('_')[0]
            year = date[0:4]
            month = date[4:6]
            day = date[6:8]
            dateUTC = UTCDateTime(year + '-' + month + '-' + day)

            if dateUTC > tstart and dateUTC < tend:
                filename = folder / "RF_Data.pkl"
                if filename.is_file():
                    with open(filename, "rb") as file:
                        rfdata = pickle.load(file)
                    # BUG FIX: the SNR term previously read
                    # `rfdata[0].stats.snr` (truthiness only, always passes
                    # for nonzero SNR); compare it against the threshold like
                    # the other criteria (matches the sibling scripts).
                    if rfdata[0].stats.snrh > args.snrh and \
                            rfdata[0].stats.snr > args.snr and \
                            rfdata[0].stats.cc > args.cc:
                        rfRstream.append(rfdata[1])
                        rfTstream.append(rfdata[2])
            else:
                continue

        # Nothing passed QC for this station — avoid median/Harmonics on
        # empty streams
        if len(rfRstream) == 0:
            continue

        if args.no_outl:
            # Remove outliers wrt variance using the MAD robust estimator
            varR = np.array([np.var(tr.data) for tr in rfRstream])

            # Calculate outliers
            medvarR = np.median(varR)
            madvarR = 1.4826 * np.median(np.abs(varR - medvarR))
            robustR = np.abs((varR - medvarR) / madvarR)
            outliersR = np.arange(len(rfRstream))[robustR > 2.]
            # Delete from the end so earlier indices stay valid
            for i in outliersR[::-1]:
                rfRstream.remove(rfRstream[i])
                rfTstream.remove(rfTstream[i])

            # Do the same for transverse
            varT = np.array([np.var(tr.data) for tr in rfTstream])
            medvarT = np.median(varT)
            madvarT = 1.4826 * np.median(np.abs(varT - medvarT))
            robustT = np.abs((varT - medvarT) / madvarT)
            outliersT = np.arange(len(rfTstream))[robustT > 2.]
            for i in outliersT[::-1]:
                rfRstream.remove(rfRstream[i])
                rfTstream.remove(rfTstream[i])

        # Try binning if specified
        if args.nbin is not None:
            rf_tmp = binning.bin(rfRstream, rfTstream,
                                 typ='baz', nbin=args.nbin + 1)
            rfRstream = rf_tmp[0]
            rfTstream = rf_tmp[1]

        # Filter original streams
        rfRstream.filter('bandpass', freqmin=args.bp[0],
                         freqmax=args.bp[1], corners=2, zerophase=True)
        rfTstream.filter('bandpass', freqmin=args.bp[0],
                         freqmax=args.bp[1], corners=2, zerophase=True)

        # Initialize the Harmonics object
        harmonics = Harmonics(rfRstream, rfTstream)

        # Stack with or without dip
        if args.find_azim:
            harmonics.dcomp_find_azim(xmin=args.trange[0],
                                      xmax=args.trange[1])
            print("Optimal azimuth for trange between " +
                  str(args.trange[0]) + " and " + str(args.trange[1]) +
                  " seconds is: " + str(harmonics.azim))
        else:
            harmonics.dcomp_fix_azim(azim=args.azim)

        if args.save_plot and not Path('FIGURES').is_dir():
            Path('FIGURES').mkdir(parents=True)

        if args.plot:
            harmonics.plot(args.ymax, args.scale,
                           args.save_plot, args.title, args.form)

        if args.save:
            filename = datapath / (harmonics.hstream[0].stats.station +
                                   ".harmonics.pkl")
            harmonics.save(filename)

        # Update processed folders
        procfold.append(stfld)
def main():
    """Plot receiver-function data for a set of stations.

    Loads an StDb station database, gathers quality-controlled receiver
    functions (phase list, SNR/CC and slowness/back-azimuth bounds),
    optionally removes variance outliers within a time range, bins by
    back-azimuth or slowness, and produces wiggle-bin and event-distribution
    plots.
    """

    print()
    print("#################################################")
    print("# __ _ _ #")
    print("# _ __ / _|_ __ _ _ _ __ | | ___ | |_ #")
    print("# | '__| |_| '_ \| | | | | '_ \| |/ _ \| __| #")
    print("# | | | _| |_) | |_| | | |_) | | (_) | |_ #")
    print("# |_| |_| | .__/ \__, |____| .__/|_|\___/ \__| #")
    print("# |_| |___/_____|_| #")
    print("# #")
    print("#################################################")
    print()

    # Run Input Parser
    args = arguments.get_plot_arguments()

    # Load Database
    db = stdb.io.load_db(fname=args.indb)

    # Construct station key loop
    allkeys = db.keys()

    # Extract key subset
    if len(args.stkeys) > 0:
        stkeys = []
        for skey in args.stkeys:
            stkeys.extend([s for s in allkeys if skey in s])
    else:
        stkeys = db.keys()

    # Loop over station keys
    for stkey in list(stkeys):

        # Extract station information from dictionary
        sta = db[stkey]

        # Define path to see if it exists
        if args.phase in ['P', 'PP', 'allP']:
            datapath = Path('P_DATA') / stkey
        elif args.phase in ['S', 'SKS', 'allS']:
            datapath = Path('S_DATA') / stkey
        if not datapath.is_dir():
            print('Path to ' + str(datapath) + ' doesn`t exist - continuing')
            continue

        # Temporary print locations
        tlocs = sta.location
        if len(tlocs) == 0:
            tlocs = ['']
        for il in range(0, len(tlocs)):
            if len(tlocs[il]) == 0:
                tlocs[il] = "--"
        sta.location = tlocs

        # Update Display
        print(" ")
        print("|===============================================|")
        print("| {0:>8s} |".format(
            sta.station))
        print("|===============================================|")
        print("| Station: {0:>2s}.{1:5s} |".format(
            sta.network, sta.station))
        print("| Channel: {0:2s}; Locations: {1:15s} |".format(
            sta.channel, ",".join(tlocs)))
        print("| Lon: {0:7.2f}; Lat: {1:6.2f} |".format(
            sta.longitude, sta.latitude))
        print("|-----------------------------------------------|")

        rfRstream = Stream()
        rfTstream = Stream()

        datafiles = [x for x in datapath.iterdir() if x.is_dir()]
        for folder in datafiles:

            # Skip hidden folders
            if folder.name.startswith('.'):
                continue

            # Load meta data
            filename = folder / "Meta_Data.pkl"
            if not filename.is_file():
                continue
            with open(filename, 'rb') as metafile:
                meta = pickle.load(metafile)

            # Skip data not in list of phases
            if meta.phase not in args.listphase:
                continue

            # QC Thresholding
            if meta.snrh < args.snrh:
                continue
            if meta.snr < args.snr:
                continue
            if meta.cc < args.cc:
                continue

            # Check bounds on data
            if meta.slow < args.slowbound[0] or meta.slow > args.slowbound[1]:
                continue
            if meta.baz < args.bazbound[0] or meta.baz > args.bazbound[1]:
                continue

            # If everything passed, load the RF data
            filename = folder / "RF_Data.pkl"
            if filename.is_file():
                with open(filename, "rb") as file:
                    rfdata = pickle.load(file)
                # Radial and transverse components sit at indices 1 and 2
                # for every supported phase (the previous if/elif branches
                # assigned identical values)
                Rcmp = 1
                Tcmp = 2
                rfRstream.append(rfdata[Rcmp])
                rfTstream.append(rfdata[Tcmp])

        if len(rfRstream) == 0:
            continue

        if args.no_outl:
            varR = []
            for i in range(len(rfRstream)):
                taxis = rfRstream[i].stats.taxis
                tselect = (taxis > args.trange[0]) & (taxis < args.trange[1])
                varR.append(np.var(rfRstream[i].data[tselect]))
            varR = np.array(varR)

            # Remove outliers wrt variance within time range (MAD estimator)
            medvarR = np.median(varR)
            madvarR = 1.4826 * np.median(np.abs(varR - medvarR))
            robustR = np.abs((varR - medvarR) / madvarR)
            outliersR = np.arange(len(rfRstream))[robustR > 2.5]
            # Delete from the end so earlier indices stay valid
            for i in outliersR[::-1]:
                rfRstream.remove(rfRstream[i])
                rfTstream.remove(rfTstream[i])

            # Do the same for transverse
            varT = []
            for i in range(len(rfRstream)):
                taxis = rfRstream[i].stats.taxis
                tselect = (taxis > args.trange[0]) & (taxis < args.trange[1])
                varT.append(np.var(rfTstream[i].data[tselect]))
            varT = np.array(varT)

            medvarT = np.median(varT)
            madvarT = 1.4826 * np.median(np.abs(varT - medvarT))
            robustT = np.abs((varT - medvarT) / madvarT)
            outliersT = np.arange(len(rfTstream))[robustT > 2.5]
            for i in outliersT[::-1]:
                rfRstream.remove(rfRstream[i])
                rfTstream.remove(rfTstream[i])
        else:
            taxis = rfRstream[0].stats.taxis

        # Filter
        if args.bp:
            rfRstream.filter('bandpass', freqmin=args.bp[0],
                             freqmax=args.bp[1], corners=2, zerophase=True)
            rfTstream.filter('bandpass', freqmin=args.bp[0],
                             freqmax=args.bp[1], corners=2, zerophase=True)

        if args.saveplot and not Path('RF_PLOTS').is_dir():
            Path('RF_PLOTS').mkdir(parents=True)

        print('')
        print("Number of radial RF data: " + str(len(rfRstream)))
        print("Number of transverse RF data: " + str(len(rfTstream)))
        print('')

        # Bin according to BAZ or slowness if requested.
        # BUG FIX: rf_tmp was previously referenced unconditionally below,
        # raising NameError when neither --nbaz nor --nslow was given.
        if args.nbaz:
            rf_tmp = binning.bin(rfRstream, rfTstream,
                                 typ='baz', nbin=args.nbaz + 1,
                                 pws=args.pws)
        elif args.nslow:
            rf_tmp = binning.bin(rfRstream, rfTstream,
                                 typ='slow', nbin=args.nslow + 1,
                                 pws=args.pws)
        else:
            rf_tmp = None

        if rf_tmp is not None:
            # Check bin counts: iterate over static copies, since removing
            # from a Stream while iterating it skips the following trace
            for tr in list(rf_tmp[0]):
                if (tr.stats.nbin < args.binlim):
                    rf_tmp[0].remove(tr)
            for tr in list(rf_tmp[1]):
                if (tr.stats.nbin < args.binlim):
                    rf_tmp[1].remove(tr)

            # Show a stacked trace on top OR normalize option specified
            if args.stacked or args.norm:
                st_tmp = binning.bin_all(rf_tmp[0], rf_tmp[1], pws=args.pws)
                tr1 = st_tmp[0]
                tr2 = st_tmp[1]
                if args.norm:
                    # Normalization constant: peak absolute amplitude over
                    # all binned traces within the plotted time range
                    tmp1 = np.array([
                        tr.data[(taxis > args.trange[0]) &
                                (taxis < args.trange[1])]
                        for tr in rf_tmp[0]
                    ])
                    tmp2 = np.array([
                        tr.data[(taxis > args.trange[0]) &
                                (taxis < args.trange[1])]
                        for tr in rf_tmp[1]
                    ])
                    normR = np.amax(np.abs(tmp1))
                    normT = np.amax(np.abs(tmp2))
                    norm = np.max([normR, normT])
                else:
                    norm = None
            else:
                norm = None
                tr1 = None
                tr2 = None

            # Now plot
            if args.nbaz:
                plotting.wiggle_bins(rf_tmp[0], rf_tmp[1], tr1=tr1, tr2=tr2,
                                     btyp='baz', scale=args.scale,
                                     tmin=args.trange[0], tmax=args.trange[1],
                                     norm=norm, save=args.saveplot,
                                     title=args.titleplot, form=args.form)
            elif args.nslow:
                plotting.wiggle_bins(rf_tmp[0], rf_tmp[1], tr1=tr1, tr2=tr2,
                                     btyp='slow', scale=args.scale,
                                     tmin=args.trange[0], tmax=args.trange[1],
                                     norm=norm, save=args.saveplot,
                                     title=args.titleplot, form=args.form)

        # Event distribution
        if args.plot_event_dist:
            plotting.event_dist(rfRstream, phase=args.phase,
                                save=args.saveplot, title=args.titleplot,
                                form=args.form)
def main():
    """H-k stacking of receiver functions for a set of stations.

    Loads an StDb station database, gathers quality-controlled receiver
    functions, bins them by slowness (and optionally back-azimuth when a
    dipping interface is modelled), then builds, stacks and averages an
    HkStack object, with optional plotting and saving.
    """

    print()
    print("#########################################")
    print("# __ _ _ #")
    print("# _ __ / _|_ __ _ _ | |__ | | __ #")
    print("# | '__| |_| '_ \| | | | | '_ \| |/ / #")
    print("# | | | _| |_) | |_| | | | | | < #")
    print("# |_| |_| | .__/ \__, |___|_| |_|_|\_\ #")
    print("# |_| |___/_____| #")
    print("# #")
    print("#########################################")
    print()

    # Run Input Parser
    args = arguments.get_hk_arguments()

    # Load Database
    db = stdb.io.load_db(fname=args.indb)

    # Construct station key loop.
    # BUG FIX: sorted() returns a new list — the previous bare
    # `sorted(allkeys)` calls discarded the result and were no-ops.
    allkeys = sorted(db.keys())

    # Extract key subset
    if len(args.stkeys) > 0:
        stkeys = []
        for skey in args.stkeys:
            stkeys.extend([s for s in allkeys if skey in s])
    else:
        stkeys = allkeys

    # Loop over station keys
    for stkey in list(stkeys):

        # Extract station information from dictionary
        sta = db[stkey]

        # Define path to see if it exists
        if args.phase in ['P', 'PP', 'allP']:
            datapath = Path('P_DATA') / stkey
        elif args.phase in ['S', 'SKS', 'allS']:
            datapath = Path('S_DATA') / stkey
        if not datapath.is_dir():
            print('Path to ' + str(datapath) + ' doesn`t exist - continuing')
            continue

        # Define save path
        if args.save:
            savepath = Path('HK_DATA') / stkey
            if not savepath.is_dir():
                print('Path to ' + str(savepath) +
                      ' doesn`t exist - creating it')
                savepath.mkdir(parents=True)

        # Get search start time
        if args.startT is None:
            tstart = sta.startdate
        else:
            tstart = args.startT

        # Get search end time
        if args.endT is None:
            tend = sta.enddate
        else:
            tend = args.endT

        # Skip stations whose deployment does not overlap the search window
        if tstart > sta.enddate or tend < sta.startdate:
            continue

        # Temporary print locations
        tlocs = sta.location
        if len(tlocs) == 0:
            tlocs = ['']
        for il in range(0, len(tlocs)):
            if len(tlocs[il]) == 0:
                tlocs[il] = "--"
        sta.location = tlocs

        # Update Display
        print(" ")
        print(" ")
        print("|===============================================|")
        print("|===============================================|")
        print("| {0:>8s} |".format(
            sta.station))
        print("|===============================================|")
        print("|===============================================|")
        print("| Station: {0:>2s}.{1:5s} |".format(
            sta.network, sta.station))
        print("| Channel: {0:2s}; Locations: {1:15s} |".format(
            sta.channel, ",".join(tlocs)))
        print("| Lon: {0:7.2f}; Lat: {1:6.2f} |".format(
            sta.longitude, sta.latitude))
        print("| Start time: {0:19s} |".format(
            sta.startdate.strftime("%Y-%m-%d %H:%M:%S")))
        print("| End time: {0:19s} |".format(
            sta.enddate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|-----------------------------------------------|")

        rfRstream = Stream()

        datafiles = [x for x in datapath.iterdir() if x.is_dir()]
        for folder in datafiles:

            # Skip hidden folders
            if folder.name.startswith('.'):
                continue

            # Event folders are named YYYYMMDD_* — parse the date prefix
            date = folder.name.split('_')[0]
            year = date[0:4]
            month = date[4:6]
            day = date[6:8]
            dateUTC = UTCDateTime(year + '-' + month + '-' + day)

            if dateUTC > tstart and dateUTC < tend:

                # Load meta data
                metafile = folder / "Meta_Data.pkl"
                if not metafile.is_file():
                    continue
                with open(metafile, 'rb') as mfile:
                    meta = pickle.load(mfile)

                # Skip data not in list of phases
                if meta.phase not in args.listphase:
                    continue

                # QC Thresholding
                if meta.snrh < args.snrh:
                    continue
                if meta.snr < args.snr:
                    continue
                if meta.cc < args.cc:
                    continue

                # If everything passed, load the RF data
                filename = folder / "RF_Data.pkl"
                if filename.is_file():
                    with open(filename, "rb") as file:
                        rfdata = pickle.load(file)
                    rfRstream.append(rfdata[1])
                    # Diagnostic: flag traces whose sample count differs
                    # from the expected 1451 — presumably a fixed taxis
                    # length; TODO confirm against the processing step
                    if rfdata[0].stats.npts != 1451:
                        print(folder)

        if len(rfRstream) == 0:
            continue

        if args.no_outl:
            # Outlier rejection is restricted to the 0-30 s window
            t1 = 0.
            t2 = 30.

            varR = []
            for i in range(len(rfRstream)):
                taxis = rfRstream[i].stats.taxis
                tselect = (taxis > t1) & (taxis < t2)
                varR.append(np.var(rfRstream[i].data[tselect]))
            varR = np.array(varR)

            # Remove outliers wrt variance within time range (MAD estimator)
            medvarR = np.median(varR)
            madvarR = 1.4826 * np.median(np.abs(varR - medvarR))
            robustR = np.abs((varR - medvarR) / madvarR)
            outliersR = np.arange(len(rfRstream))[robustR > 2.5]
            # Delete from the end so earlier indices stay valid
            for i in outliersR[::-1]:
                rfRstream.remove(rfRstream[i])

        print('')
        print("Number of radial RF data: " + str(len(rfRstream)))
        print('')

        # Try binning if specified
        if args.calc_dip:
            rf_tmp = binning.bin_baz_slow(rfRstream,
                                          nbaz=args.nbaz + 1,
                                          nslow=args.nslow + 1,
                                          pws=args.pws)
            rfRstream = rf_tmp[0]
        else:
            rf_tmp = binning.bin(rfRstream,
                                 typ='slow', nbin=args.nslow + 1,
                                 pws=args.pws)
            rfRstream = rf_tmp[0]

        # Get a copy of the radial component and filter
        if args.copy:
            rfRstream_copy = rfRstream.copy()
            rfRstream_copy.filter('bandpass', freqmin=args.bp_copy[0],
                                  freqmax=args.bp_copy[1], corners=2,
                                  zerophase=True)

        # Check bin counts: iterate over a static copy, since removing from
        # a Stream while iterating it skips the following trace
        for tr in list(rfRstream):
            if (tr.stats.nbin < args.binlim):
                rfRstream.remove(tr)

        # Continue if stream is too short
        if len(rfRstream) < 5:
            continue

        if args.save_plot and not Path('HK_PLOTS').is_dir():
            Path('HK_PLOTS').mkdir(parents=True)

        print('')
        print("Number of radial RF bins: " + str(len(rfRstream)))
        print('')

        # Filter original stream
        rfRstream.filter('bandpass', freqmin=args.bp[0],
                         freqmax=args.bp[1], corners=2, zerophase=True)

        # Initialize the HkStack object; only pass the filtered copy when
        # one was requested (the old code relied on a bare except/NameError)
        if args.copy:
            hkstack = HkStack(rfRstream, rfV2=rfRstream_copy,
                              strike=args.strike, dip=args.dip, vp=args.vp)
        else:
            hkstack = HkStack(rfRstream,
                              strike=args.strike, dip=args.dip, vp=args.vp)

        # Update attributes
        hkstack.hbound = args.hbound
        hkstack.kbound = args.kbound
        hkstack.dh = args.dh
        hkstack.dk = args.dk
        hkstack.weights = args.weights

        # Stack with or without dip
        if args.calc_dip:
            hkstack.stack_dip()
        else:
            hkstack.stack()

        # Average stacks
        hkstack.average(typ=args.typ)

        if args.plot:
            hkstack.plot(args.save_plot, args.title, args.form)

        if args.save:
            filename = savepath / (hkstack.rfV1[0].stats.station +
                                   ".hkstack." + args.typ + ".pkl")
            hkstack.save(file=filename)
def main():
    """Estimate the best-fit azimuth correction from receiver functions.

    Loads an StDb station database, gathers quality-controlled receiver
    functions, optionally removes variance outliers and bandpass-filters,
    bins by back-azimuth, then decomposes the binned streams to find the
    azimuth correction, with optional bootstrap uncertainty.
    """

    # Run Input Parser
    args = arguments.get_lkss_arguments()

    # Load Database
    db = stdb.io.load_db(fname=args.indb)

    # Construct station key loop
    allkeys = db.keys()

    # Extract key subset
    if len(args.stkeys) > 0:
        stkeys = []
        for skey in args.stkeys:
            stkeys.extend([s for s in allkeys if skey in s])
    else:
        stkeys = db.keys()

    # Loop over station keys
    for stkey in list(stkeys):

        # Extract station information from dictionary
        sta = db[stkey]

        # Define path to see if it exists
        datapath = 'P_DATA/' + stkey
        if not os.path.isdir(datapath):
            print('Path to ' + datapath + ' doesn`t exist - continuing')
            continue

        # Temporary print locations
        tlocs = sta.location
        if len(tlocs) == 0:
            tlocs = ['']
        for il in range(0, len(tlocs)):
            if len(tlocs[il]) == 0:
                tlocs[il] = "--"
        sta.location = tlocs

        # Update Display
        print(" ")
        print(" ")
        print("|===============================================|")
        print("| {0:>8s} |".format(
            sta.station))
        print("|===============================================|")
        print("| Station: {0:>2s}.{1:5s} |".format(
            sta.network, sta.station))
        print("| Channel: {0:2s}; Locations: {1:15s} |".format(
            sta.channel, ",".join(tlocs)))
        print("| Lon: {0:7.2f}; Lat: {1:6.2f} |".format(
            sta.longitude, sta.latitude))
        print("|-----------------------------------------------|")

        rfRstream = Stream()
        rfTstream = Stream()

        for folder in os.listdir(datapath):

            filename = datapath + "/" + folder + "/RF_Data.pkl"
            if os.path.isfile(filename):
                with open(filename, "rb") as file:
                    rfdata = pickle.load(file)
                if rfdata[0].stats.snr > args.snr and \
                        rfdata[0].stats.cc > args.cc:
                    rfRstream.append(rfdata[1])
                    rfTstream.append(rfdata[2])

        if len(rfRstream) == 0:
            continue

        if args.no_outl:
            # Remove outliers wrt variance using the MAD robust estimator
            varR = np.array([np.var(tr.data) for tr in rfRstream])
            medvarR = np.median(varR)
            madvarR = 1.4826 * np.median(np.abs(varR - medvarR))
            robustR = np.abs((varR - medvarR) / madvarR)
            outliersR = np.arange(len(rfRstream))[robustR > 2.5]
            # Delete from the end so earlier indices stay valid
            for i in outliersR[::-1]:
                rfRstream.remove(rfRstream[i])
                rfTstream.remove(rfTstream[i])

            # Do the same for transverse
            varT = np.array([np.var(tr.data) for tr in rfTstream])
            medvarT = np.median(varT)
            madvarT = 1.4826 * np.median(np.abs(varT - medvarT))
            robustT = np.abs((varT - medvarT) / madvarT)
            outliersT = np.arange(len(rfTstream))[robustT > 2.5]
            for i in outliersT[::-1]:
                rfRstream.remove(rfRstream[i])
                rfTstream.remove(rfTstream[i])

        if args.bp:
            # Filter
            rfRstream.filter('bandpass', freqmin=args.bp[0],
                             freqmax=args.bp[1], corners=2, zerophase=True)
            rfTstream.filter('bandpass', freqmin=args.bp[0],
                             freqmax=args.bp[1], corners=2, zerophase=True)

        # Binning
        rf_tmp = binning.bin(rfRstream, rfTstream,
                             typ='baz', nbin=args.nbaz + 1, pws=args.pws)

        azcorr, *_ = utils.decompose(rf_tmp[0], rf_tmp[1],
                                     args.trange[0], args.trange[1],
                                     plot_f=args.plot_f,
                                     plot_comps=args.plot_comps)
        print("Best fit azcorr: " + "{0:5.1f}".format(azcorr))

        # Bootstrap statistics?
        if args.boot:
            azcorr, err_azcorr = utils.get_bootstrap(rf_tmp[0], rf_tmp[1],
                                                     args.trange[0],
                                                     args.trange[1],
                                                     plot_hist=True)
            print("Bootstrap azcorr and uncertainty: " +
                  "{0:5.1f}, {1:5.1f}".format(azcorr, err_azcorr))
def main():
    """Harmonic decomposition of receiver functions (legacy options-based
    driver).

    Loads an StDb station database, gathers receiver-function data within
    the search window, optionally bins by back-azimuth, bandpass-filters,
    then computes the harmonic decomposition, with optional plotting and
    saving.
    """

    # Run Input Parser
    (opts, indb) = options.get_harmonics_options()

    # Load Database
    db = stdb.io.load_db(fname=indb)

    # Construct station key loop.
    # BUG FIX: sorted() returns a new list — the previous bare
    # `sorted(allkeys)` calls discarded the result and were no-ops.
    allkeys = sorted(db.keys())

    # Extract key subset
    if len(opts.stkeys) > 0:
        stkeys = []
        for skey in opts.stkeys:
            stkeys.extend([s for s in allkeys if skey in s])
    else:
        stkeys = allkeys

    # Loop over station keys
    for stkey in list(stkeys):

        # Extract station information from dictionary
        sta = db[stkey]

        # Define path to see if it exists
        datapath = 'DATA/' + stkey
        if not os.path.isdir(datapath):
            raise (Exception('Path to ' + datapath +
                             ' doesn`t exist - aborting'))

        # Get search start time
        if opts.startT is None:
            tstart = sta.startdate
        else:
            tstart = opts.startT

        # Get search end time
        if opts.endT is None:
            tend = sta.enddate
        else:
            tend = opts.endT

        # Skip stations whose deployment does not overlap the search window
        if tstart > sta.enddate or tend < sta.startdate:
            continue

        # Temporary print locations
        tlocs = sta.location
        if len(tlocs) == 0:
            tlocs = ['']
        for il in range(0, len(tlocs)):
            if len(tlocs[il]) == 0:
                tlocs[il] = "--"
        sta.location = tlocs

        # Update Display
        print(" ")
        print(" ")
        print("|===============================================|")
        print("|===============================================|")
        print("| {0:>8s} |".format(
            sta.station))
        print("|===============================================|")
        print("|===============================================|")
        print("| Station: {0:>2s}.{1:5s} |".format(
            sta.network, sta.station))
        print("| Channel: {0:2s}; Locations: {1:15s} |".format(
            sta.channel, ",".join(tlocs)))
        print("| Lon: {0:7.2f}; Lat: {1:6.2f} |".format(
            sta.longitude, sta.latitude))
        print("| Start time: {0:19s} |".format(
            sta.startdate.strftime("%Y-%m-%d %H:%M:%S")))
        print("| End time: {0:19s} |".format(
            sta.enddate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|-----------------------------------------------|")

        rfRstream = Stream()
        rfTstream = Stream()

        for folder in os.listdir(datapath):

            # Event folders are named YYYYMMDD_* — parse the date prefix
            date = folder.split('_')[0]
            year = date[0:4]
            month = date[4:6]
            day = date[6:8]
            dateUTC = UTCDateTime(year + '-' + month + '-' + day)

            if dateUTC > tstart and dateUTC < tend:
                with open(datapath + "/" + folder + "/RF_Data.pkl",
                          "rb") as file:
                    rfdata = pickle.load(file)
                # NOTE(review): the whole unpickled object is appended to
                # BOTH streams here; newer drivers append rfdata[1]/rfdata[2]
                # (radial/transverse). Presumably the legacy pickle holds a
                # single trace — confirm against the legacy data format
                # before changing.
                rfRstream.append(rfdata)
                rfTstream.append(rfdata)
            else:
                continue

        # Try binning if specified
        if opts.nbin is not None:
            rf_tmp = binning.bin(rfRstream, rfTstream,
                                 typ='baz', nbin=opts.nbin + 1)
            rfRstream = rf_tmp[0]
            rfTstream = rf_tmp[1]

        # Filter original streams
        rfRstream.filter('bandpass', freqmin=opts.freqs[0],
                         freqmax=opts.freqs[1], corners=2, zerophase=True)
        rfTstream.filter('bandpass', freqmin=opts.freqs[0],
                         freqmax=opts.freqs[1], corners=2, zerophase=True)

        # Initialize the Harmonics object
        harmonics = Harmonics(rfRstream, rfTstream)

        # Stack with or without dip
        if opts.find_azim:
            harmonics.dcomp_find_azim(xmin=opts.trange[0],
                                      xmax=opts.trange[1])
            # BUG FIX: added the missing space before "is:" in the message
            print("Optimal azimuth for trange between " +
                  str(opts.trange[0]) + " and " + str(opts.trange[1]) +
                  " is: " + str(harmonics.azim))
        else:
            harmonics.dcomp_fix_azim(azim=opts.azim)

        if opts.plot:
            harmonics.plot(opts.ymax, opts.scale,
                           opts.save_plot, opts.title, opts.form)

        if opts.save:
            # BUG FIX: previously read `hkstack.hstream` (undefined name —
            # NameError) and called save() without the filename just built
            filename = datapath + "/" + \
                harmonics.hstream[0].stats.station + ".harmonics.pkl"
            harmonics.save(filename)
def main():
    """H-k stacking of receiver functions (legacy options-based driver).

    Loads an StDb station database, gathers receiver-function data within
    the search window, optionally bins by slowness and keeps a separately
    filtered copy, then builds, stacks and averages an HkStack object, with
    optional plotting and saving.
    """

    # Run Input Parser
    (opts, indb) = options.get_hk_options()

    # Load Database
    db = stdb.io.load_db(fname=indb)

    # Construct station key loop.
    # BUG FIX: sorted() returns a new list — the previous bare
    # `sorted(allkeys)` calls discarded the result and were no-ops.
    allkeys = sorted(db.keys())

    # Extract key subset
    if len(opts.stkeys) > 0:
        stkeys = []
        for skey in opts.stkeys:
            stkeys.extend([s for s in allkeys if skey in s])
    else:
        stkeys = allkeys

    # Loop over station keys
    for stkey in list(stkeys):

        # Extract station information from dictionary
        sta = db[stkey]

        # Define path to see if it exists
        datapath = 'DATA/' + stkey
        if not os.path.isdir(datapath):
            raise (Exception('Path to ' + datapath +
                             ' doesn`t exist - aborting'))

        # Get search start time
        if opts.startT is None:
            tstart = sta.startdate
        else:
            tstart = opts.startT

        # Get search end time
        if opts.endT is None:
            tend = sta.enddate
        else:
            tend = opts.endT

        # Skip stations whose deployment does not overlap the search window
        if tstart > sta.enddate or tend < sta.startdate:
            continue

        # Temporary print locations
        tlocs = sta.location
        if len(tlocs) == 0:
            tlocs = ['']
        for il in range(0, len(tlocs)):
            if len(tlocs[il]) == 0:
                tlocs[il] = "--"
        sta.location = tlocs

        # Update Display
        print(" ")
        print(" ")
        print("|===============================================|")
        print("|===============================================|")
        print("| {0:>8s} |".format(
            sta.station))
        print("|===============================================|")
        print("|===============================================|")
        print("| Station: {0:>2s}.{1:5s} |".format(
            sta.network, sta.station))
        print("| Channel: {0:2s}; Locations: {1:15s} |".format(
            sta.channel, ",".join(tlocs)))
        print("| Lon: {0:7.2f}; Lat: {1:6.2f} |".format(
            sta.longitude, sta.latitude))
        print("| Start time: {0:19s} |".format(
            sta.startdate.strftime("%Y-%m-%d %H:%M:%S")))
        print("| End time: {0:19s} |".format(
            sta.enddate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|-----------------------------------------------|")

        rfRstream = Stream()

        for folder in os.listdir(datapath):

            # Event folders are named YYYYMMDD_* — parse the date prefix
            date = folder.split('_')[0]
            year = date[0:4]
            month = date[4:6]
            day = date[6:8]
            dateUTC = UTCDateTime(year + '-' + month + '-' + day)

            if dateUTC > tstart and dateUTC < tend:
                with open(datapath + "/" + folder + "/RF_Data.pkl",
                          "rb") as file:
                    rfdata = pickle.load(file)
                # NOTE(review): the whole unpickled object is appended;
                # newer drivers append rfdata[1] (radial). Presumably the
                # legacy pickle holds a single trace — confirm against the
                # legacy data format before changing.
                rfRstream.append(rfdata)
            else:
                continue

        # Try binning if specified
        if opts.nbin is not None:
            rf_tmp = binning.bin(rfRstream,
                                 typ='slow', nbin=opts.nbin + 1)
            rfRstream = rf_tmp[0]

        # Get a copy of the radial component and filter
        if opts.copy:
            rfRstream_copy = rfRstream.copy()
            rfRstream_copy.filter('bandpass', freqmin=opts.freqs_copy[0],
                                  freqmax=opts.freqs_copy[1], corners=2,
                                  zerophase=True)

        # Filter original stream
        rfRstream.filter('bandpass', freqmin=opts.freqs[0],
                         freqmax=opts.freqs[1], corners=2, zerophase=True)

        # Initialize the HkStack object; only pass the filtered copy when
        # one was requested (the old code relied on a bare except/NameError)
        if opts.copy:
            hkstack = HkStack(rfRstream, rfV2=rfRstream_copy,
                              strike=opts.strike, dip=opts.dip, vp=opts.vp)
        else:
            hkstack = HkStack(rfRstream,
                              strike=opts.strike, dip=opts.dip, vp=opts.vp)

        # Update attributes
        hkstack.hbound = opts.hbound
        hkstack.kbound = opts.kbound
        hkstack.dh = opts.dh
        hkstack.dk = opts.dk
        hkstack.weights = opts.weights

        # Stack with or without dip
        if opts.calc_dip:
            hkstack.stack_dip()
        else:
            hkstack.stack()

        # Average stacks
        hkstack.average(typ=opts.typ)

        if opts.plot:
            hkstack.plot(opts.save_plot, opts.title, opts.form)

        if opts.save:
            filename = datapath + "/" + hkstack.hstream[0].stats.station + \
                ".hkstack.pkl"
            hkstack.save(file=filename)
def main():
    """Harmonic decomposition of receiver functions (legacy options-based
    driver with quality control).

    Loads an StDb station database, gathers quality-controlled receiver
    functions within the search window, optionally removes variance
    outliers and bins by back-azimuth, then computes the harmonic
    decomposition, with optional plotting and saving.
    """

    print()
    print(
        "################################################################################"
    )
    print(
        "# __ _ _ #"
    )
    print(
        "# _ __ / _|_ __ _ _ | |__ __ _ _ __ _ __ ___ ___ _ __ (_) ___ ___ #"
    )
    print(
        "# | '__| |_| '_ \| | | | | '_ \ / _` | '__| '_ ` _ \ / _ \| '_ \| |/ __/ __| #"
    )
    print(
        "# | | | _| |_) | |_| | | | | | (_| | | | | | | | | (_) | | | | | (__\__ \ #"
    )
    print(
        "# |_| |_| | .__/ \__, |___|_| |_|\__,_|_| |_| |_| |_|\___/|_| |_|_|\___|___/ #"
    )
    print(
        "# |_| |___/_____| #"
    )
    print(
        "# #"
    )
    print(
        "################################################################################"
    )
    print()

    # Run Input Parser
    (opts, indb) = options.get_harmonics_options()

    # Load Database
    db = stdb.io.load_db(fname=indb)

    # Construct station key loop.
    # BUG FIX: sorted() returns a new list — the previous bare
    # `sorted(allkeys)` calls discarded the result and were no-ops.
    allkeys = sorted(db.keys())

    # Extract key subset
    if len(opts.stkeys) > 0:
        stkeys = []
        for skey in opts.stkeys:
            stkeys.extend([s for s in allkeys if skey in s])
    else:
        stkeys = allkeys

    # Loop over station keys
    for stkey in list(stkeys):

        # Extract station information from dictionary
        sta = db[stkey]

        # Define path to see if it exists
        datapath = 'DATA/' + stkey
        if not os.path.isdir(datapath):
            raise (Exception('Path to ' + datapath +
                             ' doesn`t exist - aborting'))

        # Get search start time
        if opts.startT is None:
            tstart = sta.startdate
        else:
            tstart = opts.startT

        # Get search end time
        if opts.endT is None:
            tend = sta.enddate
        else:
            tend = opts.endT

        # Skip stations whose deployment does not overlap the search window
        if tstart > sta.enddate or tend < sta.startdate:
            continue

        # Temporary print locations
        tlocs = sta.location
        if len(tlocs) == 0:
            tlocs = ['']
        for il in range(0, len(tlocs)):
            if len(tlocs[il]) == 0:
                tlocs[il] = "--"
        sta.location = tlocs

        # Update Display
        print(" ")
        print(" ")
        print("|===============================================|")
        print("|===============================================|")
        print("| {0:>8s} |".format(
            sta.station))
        print("|===============================================|")
        print("|===============================================|")
        print("| Station: {0:>2s}.{1:5s} |".format(
            sta.network, sta.station))
        print("| Channel: {0:2s}; Locations: {1:15s} |".format(
            sta.channel, ",".join(tlocs)))
        print("| Lon: {0:7.2f}; Lat: {1:6.2f} |".format(
            sta.longitude, sta.latitude))
        print("| Start time: {0:19s} |".format(
            sta.startdate.strftime("%Y-%m-%d %H:%M:%S")))
        print("| End time: {0:19s} |".format(
            sta.enddate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|-----------------------------------------------|")

        rfRstream = Stream()
        rfTstream = Stream()

        for folder in os.listdir(datapath):

            # Skip hidden folders
            if folder.startswith('.'):
                continue

            # Event folders are named YYYYMMDD_* — parse the date prefix
            date = folder.split('_')[0]
            year = date[0:4]
            month = date[4:6]
            day = date[6:8]
            dateUTC = UTCDateTime(year + '-' + month + '-' + day)

            if dateUTC > tstart and dateUTC < tend:
                filename = datapath + "/" + folder + "/RF_Data.pkl"
                if os.path.isfile(filename):
                    with open(filename, "rb") as file:
                        rfdata = pickle.load(file)
                    # BUG FIX: the SNR term previously read
                    # `rfdata[0].stats.snr` (truthiness only, always passes
                    # for nonzero SNR); compare it against the threshold
                    # like the other criteria. Assumes the options parser
                    # defines opts.snr alongside opts.snrh — TODO confirm.
                    if rfdata[0].stats.snrh > opts.snrh and \
                            rfdata[0].stats.snr > opts.snr and \
                            rfdata[0].stats.cc > opts.cc:
                        rfRstream.append(rfdata[1])
                        rfTstream.append(rfdata[2])
            else:
                continue

        # Nothing passed QC for this station — avoid median/Harmonics on
        # empty streams
        if len(rfRstream) == 0:
            continue

        if opts.no_outl:
            # Remove outliers wrt variance using the MAD robust estimator
            varR = np.array([np.var(tr.data) for tr in rfRstream])

            # Calculate outliers
            medvarR = np.median(varR)
            madvarR = 1.4826 * np.median(np.abs(varR - medvarR))
            robustR = np.abs((varR - medvarR) / madvarR)
            outliersR = np.arange(len(rfRstream))[robustR > 2.]
            # Delete from the end so earlier indices stay valid
            for i in outliersR[::-1]:
                rfRstream.remove(rfRstream[i])
                rfTstream.remove(rfTstream[i])

            # Do the same for transverse
            varT = np.array([np.var(tr.data) for tr in rfTstream])
            medvarT = np.median(varT)
            madvarT = 1.4826 * np.median(np.abs(varT - medvarT))
            robustT = np.abs((varT - medvarT) / madvarT)
            outliersT = np.arange(len(rfTstream))[robustT > 2.]
            for i in outliersT[::-1]:
                rfRstream.remove(rfRstream[i])
                rfTstream.remove(rfTstream[i])

        # Try binning if specified
        if opts.nbin is not None:
            rf_tmp = binning.bin(rfRstream, rfTstream,
                                 typ='baz', nbin=opts.nbin + 1)
            rfRstream = rf_tmp[0]
            rfTstream = rf_tmp[1]

        # Filter original streams
        rfRstream.filter('bandpass', freqmin=opts.bp[0],
                         freqmax=opts.bp[1], corners=2, zerophase=True)
        rfTstream.filter('bandpass', freqmin=opts.bp[0],
                         freqmax=opts.bp[1], corners=2, zerophase=True)

        # Initialize the Harmonics object
        harmonics = Harmonics(rfRstream, rfTstream)

        # Stack with or without dip
        if opts.find_azim:
            harmonics.dcomp_find_azim(xmin=opts.trange[0],
                                      xmax=opts.trange[1])
            # BUG FIX: added the missing space before "is:" in the message
            print("Optimal azimuth for trange between " +
                  str(opts.trange[0]) + " and " + str(opts.trange[1]) +
                  " is: " + str(harmonics.azim))
        else:
            harmonics.dcomp_fix_azim(azim=opts.azim)

        if opts.plot:
            harmonics.plot(opts.ymax, opts.scale,
                           opts.save_plot, opts.title, opts.form)

        if opts.save:
            # BUG FIX: previously read `hkstack.hstream` (undefined name —
            # NameError) and called save() without the filename just built
            filename = datapath + "/" + \
                harmonics.hstream[0].stats.station + ".harmonics.pkl"
            harmonics.save(filename)