def main():
    """
    Driver: for every station in the database, search an FDSN event
    catalogue, download 4-hour waveform windows for accepted events,
    run the DL (Doran-Laske) orientation calculation, and pickle the
    raw traces and event metadata under ``saveloc/STATION/EVENTTIME/``.

    Side effects: creates directories, writes pickle files, prints
    progress to stdout. Raises Exception if the catalogue request fails.
    """

    # Run the Input Parser
    args = arguments.get_dl_calc_arguments()

    # Load Database
    db, stkeys = stdb.io.load_db(fname=args.indb, keys=args.stkeys)

    # Loop over station keys
    for stkey in list(stkeys):

        sta = db[stkey]

        # Output directory for this station
        outdir = Path(args.saveloc) / Path(stkey.upper())
        if not outdir.exists():
            outdir.mkdir(parents=True)

        # Establish client for catalogue
        if args.verb > 1:
            print(" Establishing Catalogue Client...")
        cat_client = Client(args.cat_client)
        if args.verb > 1:
            print(" Done")

        # Establish client for waveforms (authenticated if credentials given)
        if args.verb > 1:
            print(" Establishing Waveform Client...")
        if len(args.UserAuth) == 0:
            wf_client = Client(args.wf_client)
        else:
            wf_client = Client(args.wf_client,
                               user=args.UserAuth[0],
                               password=args.UserAuth[1])
        if args.verb > 1:
            print(" Done")
            print(" ")

        # Get catalogue search start time
        if args.startT is None:
            tstart = sta.startdate
        else:
            tstart = args.startT

        # Get catalogue search end time
        if args.endT is None:
            tend = sta.enddate
        else:
            tend = args.endT

        # Skip stations whose deployment does not overlap the search window
        if tstart > sta.enddate or tend < sta.startdate:
            continue

        # Temporary print locations: empty location codes display as "--"
        tlocs = sta.location
        if len(tlocs) == 0:
            tlocs = ['']
        tlocs = ["--" if len(tloc) == 0 else tloc for tloc in tlocs]
        sta.location = tlocs

        # Update Display
        if args.verb > 1:
            print("|==============================================|")
            print("| {0:>8s} |".format(
                sta.station))
            print("|==============================================|")
            print("| Station: {0:>2s}.{1:5s} |".format(
                sta.network, sta.station))
            print("| Channel: {0:2s}; Locations: {1:15s} |".format(
                sta.channel, ",".join(tlocs)))
            print("| Lon: {0:7.2f}; Lat: {1:6.2f} |".format(
                sta.longitude, sta.latitude))
            print("| Start time: {0:19s} |".format(
                sta.startdate.strftime("%Y-%m-%d %H:%M:%S")))
            print("| End time: {0:19s} |".format(
                sta.enddate.strftime("%Y-%m-%d %H:%M:%S")))
            print("| Output Directory: ", args.saveloc)
            print("| Save Progress: ", args.constsave)
            print("|----------------------------------------------|")
            print("| Searching Possible events: |")
            print("| Start: {0:19s} |".format(
                tstart.strftime("%Y-%m-%d %H:%M:%S")))
            print("| End: {0:19s} |".format(
                tend.strftime("%Y-%m-%d %H:%M:%S")))
            # BUG FIX: '.format' was detached from the string
            # (print("...{0:3.1f}", format(args.minmag) + " |")), which
            # printed the raw placeholder and the magnitude separately.
            print("| Mag: >{0:3.1f}".format(args.minmag) + " |")
            print("| Min Distance: {0:.1f}".format(args.mindist))
            print("| Max Distance: {0:.1f}".format(args.maxdist))
            print("| Max Depth: {0:.1f}".format(args.maxdep))

        # Retrieve Event Catalogue
        if args.verb > 1:
            print("| Request Event Catalogue... |")
            print("| ... |")
        try:
            cat = cat_client.get_events(
                starttime=tstart, endtime=tend,
                minmagnitude=args.minmag)
            # Get index of repeat events, save for later
            reps = np.unique(utils.catclean(cat))
        except Exception as e:
            # Narrowed from a bare 'except:'; chain the original cause
            # so the FDSN error is not lost.
            raise Exception(" Fatal Error: Cannot download Catalogue") from e

        if args.verb > 1:
            print("| Retrieved {0} events ".format(len(cat.events)))
            print()

        for i, ev in enumerate(cat):

            # Skip repeat events identified by catclean
            if i in reps:
                continue

            # Initialize DL object with station info
            dldata = DL(sta)

            # Add event to object; 'accept' is True only if the event
            # passes the distance/depth criteria
            accept = dldata.add_event(
                ev, gacmin=args.mindist, gacmax=args.maxdist,
                depmax=args.maxdep, returned=True)

            # Guard clause instead of nesting the rest under 'if accept:'.
            # NOTE: the original also computed yr/jd/hr from
            # dldata.meta.time *before* this check — those locals were
            # unused and could fail for rejected events; removed.
            if not accept:
                continue

            # Display Event Info
            print(" ")
            print("**************************************************")
            print("* ({0:d}/{1:d}): {2:13s} {3}".format(
                i+1, len(cat), dldata.meta.time.strftime(
                    "%Y%m%d_%H%M%S"), stkey))
            if args.verb > 1:
                print("* Origin Time: " +
                      dldata.meta.time.strftime("%Y-%m-%d %H:%M:%S"))
                print(
                    "* Lat: {0:6.2f}; Lon: {1:7.2f}".format(
                        dldata.meta.lat, dldata.meta.lon))
                print(
                    "* Dep: {0:6.2f} km; Mag: {1:3.1f}".format(
                        dldata.meta.dep, dldata.meta.mag))
                print("* Dist: {0:7.2f} km;".format(dldata.meta.epi_dist) +
                      " Epi dist: {0:6.2f} deg\n".format(dldata.meta.gac) +
                      "* Baz: {0:6.2f} deg;".format(dldata.meta.baz) +
                      " Az: {0:6.2f} deg".format(dldata.meta.az))

            # Event Folder and output file paths
            timekey = dldata.meta.time.strftime("%Y%m%d_%H%M%S")
            evtdir = outdir / timekey
            evtdata = evtdir / 'Raw_data.pkl'
            evtmeta = evtdir / 'Meta_data.pkl'

            # Skip events already processed unless overwrite was requested
            if evtdir.exists() and evtdata.exists() and not args.ovr:
                continue

            # Get data: 4-hour window from origin time, resampled to 2 Hz
            t1 = 0.
            t2 = 4.*60.*60.
            has_data = dldata.download_data(
                client=wf_client, stdata=args.localdata,
                ndval=args.ndval, new_sr=2., t1=t1, t2=t2,
                returned=True, verbose=args.verb)

            if not has_data:
                continue

            # Check data length (checklen is truthy when the length is bad)
            if utils.checklen(dldata.data, 4.*60.*60.):
                print(" Error: Length Incorrect")
                continue

            # Create Folder if it doesn't exist
            if not evtdir.exists():
                evtdir.mkdir(parents=True)

            # Save raw Traces ('with' closes the file even on error;
            # the original leaked the file handle)
            with open(evtdata, "wb") as fdata:
                pickle.dump(dldata.data, fdata)

            # Calculate DL orientation
            dldata.calc(showplot=False)

            if args.verb > 1:
                print("* R1PHI: {}".format(dldata.meta.R1phi))
                print("* R2PHI: {}".format(dldata.meta.R2phi))
                print("* R1CC: {}".format(dldata.meta.R1cc))
                print("* R2CC: {}".format(dldata.meta.R2cc))

            # Save event meta data
            with open(evtmeta, "wb") as fmeta:
                pickle.dump(dldata.meta, fmeta)
def calc(self, showplot=False):
    """
    Method to estimate azimuth of component `?H1` (or `?HN`).

    This method maximizes the normalized covariance between the
    Hilbert-transformed vertical component and the radial component of
    Rayleigh-wave data measured at multiple periods. This is done for
    the shortest (R1) and longest (R2) orbits of fundamental-mode
    Rayleigh waves. Window selection is done based on average group
    velocity extracted from a global model of Rayleigh-wave dispersion.

    Parameters
    ----------
    showplot : bool
        Whether or not to plot waveforms.
        NOTE(review): not referenced in this body — presumably consumed
        by plotting code elsewhere; confirm before removing.

    Raises
    ------
    Exception
        If the waveform data length is not the expected 4 hours.

    Attributes
    ----------
    meta.R1phi : :class:`~numpy.ndarray`
        Azimuth of H1 (or HN) component (deg) based on R1,
        measured at each period
    meta.R1cc : :class:`~numpy.ndarray`
        Cross-correlation coefficient between Hilbert-transformed
        vertical and radial component for R1, measured at each period
        (fixed: previously documented as a single float)
    meta.R2phi : :class:`~numpy.ndarray`
        Azimuth of H1 (or HN) component (deg) based on R2,
        measured at each period
    meta.R2cc : :class:`~numpy.ndarray`
        Cross-correlation coefficient between Hilbert-transformed
        vertical and radial component for R2, measured at each period
        (fixed: previously documented as a single float)

    """

    # Check data length up front, before the expensive map loads
    # (checklen is truthy when the length is bad)
    if utils.checklen(self.data, 4. * 60. * 60.):
        raise(Exception(" Error: Length Incorrect"))

    # Work on a copy of the waveform data
    stream = self.data.copy()

    # Initialize surface wave arrays: one entry per analysis period
    nper = 7
    R1phi = np.zeros(nper)
    R1cc = np.zeros(nper)
    R2phi = np.zeros(nper)
    R2cc = np.zeros(nper)

    # Load group velocity maps for periods 10 s through 40 s.
    # (Replaces seven copy-pasted np.loadtxt calls; load order is
    # identical to the original map10..map40 sequence.)
    periods = [10, 15, 20, 25, 30, 35, 40]
    gvmaps = [
        np.loadtxt(resource_filename(
            'orientpy', 'dispmaps/R.gv.{0:d}.txt'.format(per)))
        for per in periods]

    # Get parameters for R2: the major-arc orbit travels the rest of
    # the great circle, arriving from the opposite back-azimuth
    Rearth = 6371.25
    circE = 2. * np.pi * Rearth
    dist2 = circE - self.meta.epi_dist
    baz2 = self.meta.baz + 180.
    if baz2 >= 360.:
        baz2 -= 360.

    # Get path-averaged group velocities for both orbits
    Ray1, Ray2 = utils.pathvels(
        self.sta.latitude, self.sta.longitude,
        self.meta.lat, self.meta.lon, *gvmaps)

    # Per-period analysis settings: central period (s), corner
    # frequencies (Hz), and window lengths for R1/R2
    Rf = [40., 35., 30., 25., 20., 15., 10.]
    LPF = [0.035, 0.03, 0.025, 0.02, 0.015, 0.01, 0.005]
    HPF = [0.045, 0.04, 0.035, 0.03, 0.025, 0.02, 0.015]
    winlen1 = [20., 17., 14., 12., 10., 10., 7.]
    winlen2 = [24., 20., 16., 13., 10., 10., 7.]

    # Calculate arrival angle for each frequency and orbit
    for k, (rf, hpf, lpf, w1, w2) in enumerate(
            zip(Rf, HPF, LPF, winlen1, winlen2)):

        # R1 path (minor arc)
        R1phi[k], R1cc[k] = utils.DLcalc(
            stream, rf, hpf, lpf,
            self.meta.epi_dist, self.meta.baz,
            Ray1, winlen=w1, ptype=0)

        # R2 path (major arc)
        R2phi[k], R2cc[k] = utils.DLcalc(
            stream, rf, hpf, lpf,
            dist2, baz2,
            Ray2, winlen=w2, ptype=0)

    # Store azimuths and CC values as attributes
    self.meta.R1phi = R1phi
    self.meta.R2phi = R2phi
    self.meta.R1cc = R1cc
    self.meta.R2cc = R2cc

    return