def main(args=None):
    """Apply pre-computed transfer functions to downloaded event data.

    For every selected station key, loads each event pickle found under
    ``EVENTS/<key>``, finds the matching transfer-function files under
    ``TF_STA/<key>`` (both "cleaned" station averages and "daily" averages),
    corrects the event data with them, and writes the corrected results
    (pickle + SAC) to ``EVENTS/<key>/CORRECTED``. Figures are optionally
    saved to ``EVENTS/<key>/PLOTS`` or shown interactively.

    Parameters
    ----------
    args : argparse.Namespace or None
        Parsed command-line arguments. If None, the input parser is run.

    Raises
    ------
    Exception
        If the transfer-function or event folder for a station is missing
        or empty.
    """

    if args is None:
        # Run Input Parser
        args = get_correct_arguments()

    # Load Database
    # stdb>0.1.3
    try:
        db, stkeys = stdb.io.load_db(fname=args.indb, keys=args.stkeys)

    # stdb=0.1.3
    except Exception:
        db = stdb.io.load_db(fname=args.indb)

        # Construct station key loop
        # FIX: sorted() returns a new list; the original discarded it.
        allkeys = sorted(db.keys())

        # Extract key subset
        if len(args.stkeys) > 0:
            stkeys = []
            for skey in args.stkeys:
                stkeys.extend([s for s in allkeys if skey in s])
        else:
            stkeys = sorted(db.keys())

    # Loop over station keys
    for stkey in list(stkeys):

        # Extract station information from dictionary
        sta = db[stkey]

        # Path where transfer functions will be located
        transpath = Path('TF_STA') / stkey
        if not transpath.is_dir():
            raise Exception("Path to " + str(transpath) +
                            " doesn`t exist - aborting")

        # Path where event data are located
        eventpath = Path('EVENTS') / stkey
        if not eventpath.is_dir():
            raise Exception("Path to " + str(eventpath) +
                            " doesn`t exist - aborting")

        # Path where plots will be saved
        if args.saveplot:
            plotpath = eventpath / 'PLOTS'
            if not plotpath.is_dir():
                plotpath.mkdir(parents=True)
        else:
            plotpath = False

        # Get catalogue search start time
        if args.startT is None:
            tstart = sta.startdate
        else:
            tstart = args.startT

        # Get catalogue search end time
        if args.endT is None:
            tend = sta.enddate
        else:
            tend = args.endT

        # Skip stations whose deployment does not overlap the search window
        if tstart > sta.enddate or tend < sta.startdate:
            continue

        # Temporary print locations: empty location codes display as "--"
        tlocs = sta.location
        if len(tlocs) == 0:
            tlocs = ['']
        for il in range(0, len(tlocs)):
            if len(tlocs[il]) == 0:
                tlocs[il] = "--"
        sta.location = tlocs

        # Update Display
        print(" ")
        print(" ")
        print("|===============================================|")
        print("|===============================================|")
        print("| {0:>8s} |".format(
            sta.station))
        print("|===============================================|")
        print("|===============================================|")
        print("| Station: {0:>2s}.{1:5s} |".format(
            sta.network, sta.station))
        print("| Channel: {0:2s}; Locations: {1:15s} |".format(
            sta.channel, ",".join(tlocs)))
        print("| Lon: {0:7.2f}; Lat: {1:6.2f} |".format(
            sta.longitude, sta.latitude))
        print("| Start time: {0:19s} |".format(
            sta.startdate.strftime("%Y-%m-%d %H:%M:%S")))
        print("| End time: {0:19s} |".format(
            sta.enddate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|-----------------------------------------------|")

        # Find all files in directories
        p = eventpath.glob('*.pkl')
        event_files = [x for x in p if x.is_file()]
        p = transpath.glob('*.*')
        trans_files = [x for x in p if x.is_file()]

        # Check if folders contain anything
        if not event_files:
            raise Exception("There are no events in folder " +
                            str(eventpath))

        if not trans_files:
            raise Exception("There are no transfer functions in folder " +
                            str(transpath))

        # Cycle through available files
        for eventfile in event_files:

            # Skip hidden files and folders
            if eventfile.name[0] == '.':
                continue

            # Event files are named <year>.<julday>.<...>.pkl
            evprefix = eventfile.name.split('.')
            evstamp = evprefix[0] + '.' + evprefix[1] + '.'

            evDateTime = UTCDateTime(evprefix[0] + '-' + evprefix[1])
            if evDateTime >= tstart and evDateTime <= tend:

                # Load event file
                # FIX: use a context manager so the handle is closed even
                # when unpickling fails; catch Exception, not bare except.
                try:
                    with open(eventfile, 'rb') as file:
                        eventstream = pickle.load(file)
                except Exception:
                    print("File " + str(eventfile) +
                          " exists but cannot be loaded")
                    continue

            else:
                continue

            if args.fig_event_raw:
                fname = stkey + '.' + evstamp + 'raw'
                plot = plotting.fig_event_raw(
                    eventstream, fmin=args.fmin, fmax=args.fmax)

                if plotpath:
                    plot.savefig(
                        plotpath / (fname + '.' + args.form),
                        dpi=300, bbox_inches='tight', format=args.form)
                else:
                    plot.show()

            # Cycle through corresponding TF files
            for transfile in trans_files:

                # Skip hidden files and folders
                if transfile.name[0] == '.':
                    continue

                tfprefix = transfile.name.split('transfunc')[0]

                # This case refers to the "cleaned" spectral averages
                # (prefix spans a date range: <yr1>.<jd1>-<yr2>.<jd2>)
                if len(tfprefix) > 9:
                    if not args.skip_clean:
                        yr1 = tfprefix.split('-')[0].split('.')[0]
                        jd1 = tfprefix.split('-')[0].split('.')[1]
                        yr2 = tfprefix.split('-')[1].split('.')[0]
                        jd2 = tfprefix.split('-')[1].split('.')[1]
                        date1 = UTCDateTime(yr1 + '-' + jd1)
                        date2 = UTCDateTime(yr2 + '-' + jd2)
                        dateev = UTCDateTime(
                            evprefix[0] + '-' + evprefix[1])

                        # Apply only if the event falls inside the TF window
                        if dateev >= date1 and dateev <= date2:
                            print(
                                str(transfile) +
                                " file found - applying transfer functions")

                            try:
                                with open(transfile, 'rb') as file:
                                    tfaverage = pickle.load(file)
                            except Exception:
                                print("File " + str(transfile) +
                                      " exists but cannot be loaded")
                                continue

                            # List of possible transfer functions for station
                            # average files
                            eventstream.correct_data(tfaverage)

                            if args.fig_plot_corrected:
                                fname = stkey + '.' + evstamp + \
                                    'sta_corrected'
                                plot = plotting.fig_event_corrected(
                                    eventstream, tfaverage.tf_list)

                                # Save or show figure
                                if plotpath:
                                    plot.savefig(
                                        plotpath /
                                        (fname + '.' + args.form),
                                        dpi=300, bbox_inches='tight',
                                        format=args.form)
                                else:
                                    plot.show()

                            # Save corrected data to disk
                            correctpath = eventpath / 'CORRECTED'
                            if not correctpath.is_dir():
                                correctpath.mkdir(parents=True)
                            file = correctpath / eventfile.stem
                            # NOTE(review): station-average branch saves
                            # '.day.pkl' while the daily branch saves
                            # '.sta.pkl' — suffixes look swapped relative to
                            # the '.sta.'/'.day.' SAC names below; confirm
                            # intended convention before changing.
                            eventstream.save(str(file) + '.day.pkl')

                            # Now save as SAC files
                            for key, value in tfaverage.tf_list.items():
                                if value and eventstream.ev_list[key]:
                                    nameZ = '.sta.' + key + '.'
                                    nameZ += sta.channel + 'Z.SAC'
                                    fileZ = correctpath / \
                                        (eventfile.stem + nameZ)
                                    trZ = eventstream.sth.select(
                                        component='Z')[0].copy()
                                    trZ.data = eventstream.correct[key]
                                    trZ = utils.update_stats(
                                        trZ, sta.latitude, sta.longitude,
                                        sta.elevation, 'Z')
                                    trZ.write(str(fileZ), format='SAC')

                # This case refers to the "daily" spectral averages
                else:
                    if not args.skip_daily:
                        # Apply only the TF computed for this exact day
                        if tfprefix == evstamp:
                            print(
                                str(transfile) +
                                " file found - applying transfer functions")

                            try:
                                with open(transfile, 'rb') as file:
                                    tfaverage = pickle.load(file)
                            except Exception:
                                print("File " + str(transfile) +
                                      " exists but cannot be loaded")
                                continue

                            # List of possible transfer functions for station
                            # average files
                            eventstream.correct_data(tfaverage)

                            if args.fig_plot_corrected:
                                fname = stkey + '.' + evstamp + \
                                    'day_corrected'
                                plot = plotting.fig_event_corrected(
                                    eventstream, tfaverage.tf_list)

                                # Save or show figure
                                if plotpath:
                                    plot.savefig(
                                        plotpath /
                                        (fname + '.' + args.form),
                                        dpi=300, bbox_inches='tight',
                                        format=args.form)
                                else:
                                    plot.show()

                            # Save corrected data to disk
                            correctpath = eventpath / 'CORRECTED'
                            if not correctpath.is_dir():
                                correctpath.mkdir(parents=True)
                            file = correctpath / eventfile.stem
                            eventstream.save(str(file) + '.sta.pkl')

                            # Now save as SAC files
                            for key, value in tfaverage.tf_list.items():
                                if value and eventstream.ev_list[key]:
                                    nameZ = '.day.' + key + '.'
                                    nameZ += sta.channel + 'Z.SAC'
                                    fileZ = correctpath / \
                                        (eventfile.stem + nameZ)
                                    trZ = eventstream.sth.select(
                                        component='Z')[0].copy()
                                    trZ.data = eventstream.correct[key]
                                    trZ = utils.update_stats(
                                        trZ, sta.latitude, sta.longitude,
                                        sta.elevation, 'Z')
                                    trZ.write(str(fileZ), format='SAC')
def main():
    """Download day-long seismic (and optional pressure) data.

    For every selected station key, splits the deployment window into
    24-hour segments, downloads the requested channels from an FDSN client,
    checks trace lengths, removes instrument responses, filters/resamples,
    and writes day-long SAC files under ``DATA/<key>``.
    """

    # Run Input Parser
    args = arguments.get_daylong_arguments()

    # Load Database
    db = stdb.io.load_db(fname=args.indb)

    # Construct station key loop
    # FIX: sorted() returns a new list; the original discarded it.
    allkeys = sorted(db.keys())

    # Extract key subset
    if len(args.stkeys) > 0:
        stkeys = []
        for skey in args.stkeys:
            stkeys.extend([s for s in allkeys if skey in s])
    else:
        stkeys = sorted(db.keys())

    # Loop over station keys
    for stkey in list(stkeys):

        # Extract station information from dictionary
        sta = db[stkey]

        # Define path to see if it exists
        datapath = Path('DATA') / Path(stkey)
        if not datapath.is_dir():
            print()
            print('Path to ' + str(datapath) + ' doesn`t exist - creating it')
            datapath.mkdir(parents=True)

        # Establish client
        if len(args.UserAuth) == 0:
            client = Client(args.Server)
        else:
            client = Client(args.Server,
                            user=args.UserAuth[0],
                            password=args.UserAuth[1])

        # Get catalogue search start time
        if args.startT is None:
            tstart = sta.startdate
        else:
            tstart = args.startT

        # Get catalogue search end time
        if args.endT is None:
            # BUG FIX: was `tend = sta.startdate`, which made
            # `while t2 <= tend` never iterate when no end time was given.
            tend = sta.enddate
        else:
            tend = args.endT

        # Skip stations whose deployment does not overlap the search window
        if tstart > sta.enddate or tend < sta.startdate:
            continue

        # Temporary print locations: empty location codes display as "--"
        tlocs = sta.location
        if len(tlocs) == 0:
            tlocs = ['']
        for il in range(0, len(tlocs)):
            if len(tlocs[il]) == 0:
                tlocs[il] = "--"
        sta.location = tlocs

        # Update Display
        print()
        print("|===============================================|")
        print("|===============================================|")
        print("| {0:>8s} |".format(
            sta.station))
        print("|===============================================|")
        print("|===============================================|")
        print("| Station: {0:>2s}.{1:5s} |".format(
            sta.network, sta.station))
        print("| Channel: {0:2s}; Locations: {1:15s} |".format(
            sta.channel, ",".join(tlocs)))
        print("| Lon: {0:7.2f}; Lat: {1:6.2f} |".format(
            sta.longitude, sta.latitude))
        print("| Start time: {0:19s} |".format(
            sta.startdate.strftime("%Y-%m-%d")))
        print("| End time: {0:19s} |".format(
            sta.enddate.strftime("%Y-%m-%d")))
        print("|-----------------------------------------------|")
        print("| Searching day-long files: |")
        print("| Start: {0:19s} |".format(
            tstart.strftime("%Y-%m-%d")))
        print("| End: {0:19s} |".format(
            tend.strftime("%Y-%m-%d")))

        # Split into 24-hour long segments
        dt = 3600. * 24.

        t1 = tstart
        t2 = tstart + dt

        while t2 <= tend:

            # Time stamp: <year>.<julian day>.
            tstamp = str(t1.year).zfill(4) + '.' + \
                str(t1.julday).zfill(3) + '.'

            print()
            print(
                "***********************************************************")
            print("* Downloading day-long data for key " + stkey +
                  " and day " + str(t1.year) + "." + str(t1.julday))
            print("*")
            print("* Channels selected: " + str(args.channels) +
                  ' and vertical')

            # Define file names (to check if files already exist)
            # Horizontal 1 channel
            file1 = datapath / (tstamp + '.' + sta.channel + '1.SAC')
            # Horizontal 2 channel
            file2 = datapath / (tstamp + '.' + sta.channel + '2.SAC')
            # Vertical channel
            fileZ = datapath / (tstamp + '.' + sta.channel + 'Z.SAC')
            # Pressure channel
            fileP = datapath / (tstamp + '.' + sta.channel + 'H.SAC')

            if "P" not in args.channels:

                # If data files exist, continue
                if fileZ.exists() and file1.exists() and file2.exists():
                    if not args.ovr:
                        print("* " + tstamp + "*SAC ")
                        print("* -> Files already exist, " +
                              "continuing ")
                        t1 += dt
                        t2 += dt
                        continue

                channels = sta.channel.upper()+'1,'+sta.channel.upper() + \
                    '2,'+sta.channel.upper()+'Z'

                # Get waveforms from client
                try:
                    print("* " + tstamp + "*SAC ")
                    print("* -> Downloading Seismic data... ")
                    sth = client.get_waveforms(
                        network=sta.network, station=sta.station,
                        location=sta.location[0], channel=channels,
                        starttime=t1, endtime=t2, attach_response=True)
                    print("* ...done")
                except Exception:
                    print(" Error: Unable to download ?H? components - " +
                          "continuing")
                    t1 += dt
                    t2 += dt
                    continue

                # Make sure length is ok
                llZ = len(sth.select(component='Z')[0].data)
                ll1 = len(sth.select(component='1')[0].data)
                ll2 = len(sth.select(component='2')[0].data)

                if (llZ != ll1) or (llZ != ll2):
                    print(" Error: lengths not all the same - continuing")
                    t1 += dt
                    t2 += dt
                    continue

                ll = int(dt * sth[0].stats.sampling_rate)

                if np.abs(llZ - ll) > 1:
                    print(" Error: Time series too short - continuing")
                    print(np.abs(llZ - ll))
                    t1 += dt
                    t2 += dt
                    continue

            elif "H" not in args.channels:

                # If data files exist, continue
                if fileZ.exists() and fileP.exists():
                    if not args.ovr:
                        print("* " + tstamp + "*SAC ")
                        print("* -> Files already exist, " +
                              "continuing ")
                        t1 += dt
                        t2 += dt
                        continue

                channels = sta.channel.upper() + 'Z'

                # Get waveforms from client
                try:
                    print("* " + tstamp + "*SAC ")
                    print("* -> Downloading Seismic data... ")
                    sth = client.get_waveforms(
                        network=sta.network, station=sta.station,
                        location=sta.location[0], channel=channels,
                        starttime=t1, endtime=t2, attach_response=True)
                    print("* ...done")
                except Exception:
                    print(" Error: Unable to download ?H? components - " +
                          "continuing")
                    t1 += dt
                    t2 += dt
                    continue

                try:
                    print("* -> Downloading Pressure data...")
                    stp = client.get_waveforms(
                        network=sta.network, station=sta.station,
                        location=sta.location[0], channel='??H',
                        starttime=t1, endtime=t2, attach_response=True)
                    print("* ...done")
                except Exception:
                    print(" Error: Unable to download ??H component - " +
                          "continuing")
                    t1 += dt
                    t2 += dt
                    continue

                # Make sure length is ok
                llZ = len(sth.select(component='Z')[0].data)
                llP = len(stp[0].data)

                if (llZ != llP):
                    print(" Error: lengths not all the same - continuing")
                    t1 += dt
                    t2 += dt
                    continue

                ll = int(dt * stp[0].stats.sampling_rate)

                if np.abs(llZ - ll) > 1:
                    print(" Error: Time series too short - continuing")
                    print(np.abs(llZ - ll))
                    t1 += dt
                    t2 += dt
                    continue

            else:

                # If data files exist, continue
                if (fileZ.exists() and file1.exists() and
                        file2.exists() and fileP.exists()):
                    if not args.ovr:
                        print("* " + tstamp + "*SAC ")
                        print("* -> Files already exist, " +
                              "continuing ")
                        t1 += dt
                        t2 += dt
                        continue

                channels = sta.channel.upper()+'1,'+sta.channel.upper() + \
                    '2,'+sta.channel.upper()+'Z'

                # Get waveforms from client
                try:
                    print("* " + tstamp + "*SAC ")
                    print("* -> Downloading Seismic data... ")
                    sth = client.get_waveforms(
                        network=sta.network, station=sta.station,
                        location=sta.location[0], channel=channels,
                        starttime=t1, endtime=t2, attach_response=True)
                    print("* ...done")
                except Exception:
                    print(" Error: Unable to download ?H? components - " +
                          "continuing")
                    t1 += dt
                    t2 += dt
                    continue

                try:
                    print("* -> Downloading Pressure data...")
                    stp = client.get_waveforms(
                        network=sta.network, station=sta.station,
                        location=sta.location[0], channel='??H',
                        starttime=t1, endtime=t2, attach_response=True)
                    print("* ...done")
                except Exception:
                    print(" Error: Unable to download ??H component - " +
                          "continuing")
                    t1 += dt
                    t2 += dt
                    continue

                # Make sure length is ok
                llZ = len(sth.select(component='Z')[0].data)
                ll1 = len(sth.select(component='1')[0].data)
                ll2 = len(sth.select(component='2')[0].data)
                llP = len(stp[0].data)

                if (llZ != ll1) or (llZ != ll2) or (llZ != llP):
                    print(" Error: lengths not all the same - continuing")
                    t1 += dt
                    t2 += dt
                    continue

                ll = int(dt * sth[0].stats.sampling_rate)

                if np.abs(llZ - ll) > 1:
                    print(" Error: Time series too short - continuing")
                    print(np.abs(llZ - ll))
                    t1 += dt
                    t2 += dt
                    continue

            # Remove responses
            print("* -> Removing responses - Seismic data")
            sth.remove_response(pre_filt=args.pre_filt, output='DISP')

            if "P" in args.channels:
                print("* -> Removing responses - Pressure data")
                stp.remove_response(pre_filt=args.pre_filt)

            # Detrend, filter - seismic data
            sth.detrend('demean')
            sth.detrend('linear')
            sth.filter('lowpass', freq=0.5*args.new_sampling_rate,
                       corners=2, zerophase=True)
            sth.resample(args.new_sampling_rate)

            if "P" in args.channels:
                # Detrend, filter - pressure data
                stp.detrend('demean')
                stp.detrend('linear')
                stp.filter('lowpass', freq=0.5*args.new_sampling_rate,
                           corners=2, zerophase=True)
                stp.resample(args.new_sampling_rate)

            # Extract traces - Z
            trZ = sth.select(component='Z')[0]
            trZ = utils.update_stats(
                trZ, sta.latitude, sta.longitude, sta.elevation, 'Z')
            trZ.write(fileZ, format='sac')

            # Extract traces - H
            if "H" in args.channels:
                tr1 = sth.select(component='1')[0]
                tr2 = sth.select(component='2')[0]
                tr1 = utils.update_stats(
                    tr1, sta.latitude, sta.longitude, sta.elevation, '1')
                tr2 = utils.update_stats(
                    tr2, sta.latitude, sta.longitude, sta.elevation, '2')
                tr1.write(file1, format='sac')
                tr2.write(file2, format='sac')

            # Extract traces - P
            if "P" in args.channels:
                trP = stp[0]
                trP = utils.update_stats(
                    trP, sta.latitude, sta.longitude, sta.elevation, 'P')
                trP.write(fileP, format='sac')

            # Advance to the next 24-hour window
            t1 += dt
            t2 += dt
def main(args=None):
    """Download event-based seismic (and optional pressure) data.

    For every selected station key, queries an FDSN event catalogue over the
    deployment window, and for each event within the requested distance
    range downloads the requested channels, QCs and pre-processes the
    traces, writes them as SAC files, and saves an ``EventStream`` pickle
    under ``EVENTS/<key>``.

    Parameters
    ----------
    args : argparse.Namespace or None
        Parsed command-line arguments. If None, the input parser is run.
    """

    if args is None:
        # Run Input Parser
        args = get_event_arguments()

    # Load Database
    # stdb>0.1.3
    try:
        db, stkeys = stdb.io.load_db(fname=args.indb, keys=args.stkeys)

    # stdb=0.1.3
    except Exception:
        db = stdb.io.load_db(fname=args.indb)

        # Construct station key loop
        # FIX: sorted() returns a new list; the original discarded it.
        allkeys = sorted(db.keys())

        # Extract key subset
        if len(args.stkeys) > 0:
            stkeys = []
            for skey in args.stkeys:
                stkeys.extend([s for s in allkeys if skey in s])
        else:
            stkeys = sorted(db.keys())

    # Loop over station keys
    for stkey in list(stkeys):

        # Extract station information from dictionary
        sta = db[stkey]

        # Define path to see if it exists
        eventpath = Path('EVENTS') / Path(stkey)
        if not eventpath.is_dir():
            print('Path to ' + str(eventpath) + ' doesn`t exist - creating it')
            eventpath.mkdir(parents=True)

        # Establish client
        if len(args.UserAuth) == 0:
            client = Client(args.Server)
        else:
            client = Client(args.Server,
                            user=args.UserAuth[0],
                            password=args.UserAuth[1])

        # Get catalogue search start time
        if args.startT is None:
            tstart = sta.startdate
        else:
            tstart = args.startT

        # Get catalogue search end time
        if args.endT is None:
            tend = sta.enddate
        else:
            tend = args.endT

        # Skip stations whose deployment does not overlap the search window
        if tstart > sta.enddate or tend < sta.startdate:
            continue

        # Temporary print locations: empty location codes display as "--"
        tlocs = sta.location
        if len(tlocs) == 0:
            tlocs = ['']
        for il in range(0, len(tlocs)):
            if len(tlocs[il]) == 0:
                tlocs[il] = "--"
        sta.location = tlocs

        # Update Display
        print(" ")
        print(" ")
        print("|===============================================|")
        print("|===============================================|")
        print("| {0:>8s} |".format(
            sta.station))
        print("|===============================================|")
        print("|===============================================|")
        print("| Station: {0:>2s}.{1:5s} |".format(
            sta.network, sta.station))
        print("| Channel: {0:2s}; Locations: {1:15s} |".format(
            sta.channel, ",".join(tlocs)))
        print("| Lon: {0:7.2f}; Lat: {1:6.2f} |".format(
            sta.longitude, sta.latitude))
        print("| Start time: {0:19s} |".format(
            sta.startdate.strftime("%Y-%m-%d %H:%M:%S")))
        print("| End time: {0:19s} |".format(
            sta.enddate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|-----------------------------------------------|")
        print("| Searching Possible events: |")
        print("| Start: {0:19s} |".format(
            tstart.strftime("%Y-%m-%d %H:%M:%S")))
        print("| End: {0:19s} |".format(
            tend.strftime("%Y-%m-%d %H:%M:%S")))
        if args.maxmag is None:
            print("| Mag: >{0:3.1f}".format(args.minmag) + " |")
        else:
            print("| Mag: {0:3.1f} - {1:3.1f}".format(
                args.minmag, args.maxmag) + " |")
        print("| ... |")

        # Get catalogue using deployment start and end
        cat = client.get_events(
            starttime=tstart, endtime=tend,
            minmagnitude=args.minmag, maxmagnitude=args.maxmag)

        # Total number of events in Catalogue
        nevK = 0
        nevtT = len(cat)
        print("| Found {0:5d}".format(nevtT) + " possible events |")

        # Select order of processing
        # (the dead initial `ievs = range(0, nevtT)` was removed; it was
        # immediately overwritten by this if/else)
        if args.reverse:
            ievs = range(0, nevtT)
        else:
            ievs = range(nevtT - 1, -1, -1)

        # Read through catalogue
        for iev in ievs:

            # Extract event
            ev = cat[iev]

            # Fixed event-window parameters
            window = 7200.

            time = ev.origins[0].time
            dep = ev.origins[0].depth
            lon = ev.origins[0].longitude
            lat = ev.origins[0].latitude

            # Epicentral distance (km and degrees), azimuth, back-azimuth
            epi_dist, az, baz = epi(lat, lon, sta.latitude, sta.longitude)
            epi_dist /= 1000.
            gac = k2d(epi_dist)
            mag = ev.magnitudes[0].mag
            if mag is None:
                mag = -9.

            # If distance between 85 and 120 deg:
            if (gac > args.mindist and gac < args.maxdist):

                # Display Event Info
                nevK = nevK + 1
                if args.reverse:
                    inum = iev + 1
                else:
                    # FIX: off-by-one — was `nevtT - iev + 1`, which showed
                    # "2/N" for the first processed event.
                    inum = nevtT - iev
                print(" ")
                print(
                    "****************************************************")
                print("* #{0:d} ({1:d}/{2:d}): {3:13s}".format(
                    nevK, inum, nevtT, time.strftime("%Y%m%d_%H%M%S")))
                print("* Origin Time: " +
                      time.strftime("%Y-%m-%d %H:%M:%S"))
                print("* Lat: {0:6.2f}; Lon: {1:7.2f}".format(lat, lon))
                print("* Dep: {0:6.2f}; Mag: {1:3.1f}".format(
                    dep / 1000., mag))
                print("* Dist: {0:7.2f} km; {1:7.2f} deg".format(
                    epi_dist, gac))

                t1 = time
                t2 = t1 + window

                # Time stamp: <year>.<julday>.<hour>.<minute>
                tstamp = str(time.year).zfill(4)+'.' + \
                    str(time.julday).zfill(3)+'.'
                tstamp = tstamp + str(time.hour).zfill(2) + \
                    '.'+str(time.minute).zfill(2)

                # Define file names (to check if files already exist)
                filename = eventpath / (tstamp + '.event.pkl')
                file1 = eventpath / (tstamp + '.1.SAC')
                file2 = eventpath / (tstamp + '.2.SAC')
                fileZ = eventpath / (tstamp + '.Z.SAC')
                fileP = eventpath / (tstamp + '.P.SAC')

                print()
                print("* Channels selected: " + str(args.channels) +
                      ' and vertical')

                # If data file exists, continue
                if filename.exists():
                    if not args.ovr:
                        print("*")
                        print("* " + str(filename))
                        print("* -> File already exists, continuing")
                        continue

                if "P" not in args.channels:

                    # Number of channels
                    ncomp = 3

                    # Comma-separated list of channels for Client
                    channels = sta.channel.upper() + '1,' + \
                        sta.channel.upper() + '2,' + \
                        sta.channel.upper() + 'Z'

                    # Get waveforms from client
                    try:
                        print("* " + tstamp + " ")
                        print("* -> Downloading Seismic data... ")
                        sth = client.get_waveforms(
                            network=sta.network, station=sta.station,
                            location=sta.location[0], channel=channels,
                            starttime=t1, endtime=t2, attach_response=True)
                        print("* ...done")
                    except Exception:
                        print(" Error: Unable to download ?H? components - " +
                              "continuing")
                        continue

                    st = sth

                elif "H" not in args.channels:

                    # Number of channels
                    ncomp = 2

                    # Comma-separated list of channels for Client
                    channels = sta.channel.upper() + 'Z'

                    # Get waveforms from client
                    try:
                        print("* " + tstamp + " ")
                        print("* -> Downloading Seismic data... ")
                        sth = client.get_waveforms(
                            network=sta.network, station=sta.station,
                            location=sta.location[0], channel=channels,
                            starttime=t1, endtime=t2, attach_response=True)
                        print("* ...done")
                    except Exception:
                        print(" Error: Unable to download ?H? components - " +
                              "continuing")
                        continue

                    try:
                        print("* -> Downloading Pressure data...")
                        stp = client.get_waveforms(
                            network=sta.network, station=sta.station,
                            location=sta.location[0], channel='?DH',
                            starttime=t1, endtime=t2, attach_response=True)
                        print("* ...done")
                        if len(stp) > 1:
                            print("WARNING: There are more than one ?DH trace")
                            print("* -> Keeping the highest sampling rate")
                            if stp[0].stats.sampling_rate > \
                                    stp[1].stats.sampling_rate:
                                stp = Stream(traces=stp[0])
                            else:
                                stp = Stream(traces=stp[1])
                    except Exception:
                        print(" Error: Unable to download ?DH component - " +
                              "continuing")
                        continue

                    st = sth + stp

                else:

                    # Number of channels
                    ncomp = 4

                    # Comma-separated list of channels for Client
                    channels = sta.channel.upper() + '1,' + \
                        sta.channel.upper() + '2,' + \
                        sta.channel.upper() + 'Z'

                    # Get waveforms from client
                    try:
                        print("* " + tstamp + " ")
                        print("* -> Downloading Seismic data... ")
                        sth = client.get_waveforms(
                            network=sta.network, station=sta.station,
                            location=sta.location[0], channel=channels,
                            starttime=t1, endtime=t2, attach_response=True)
                        print("* ...done")
                    except Exception:
                        print(" Error: Unable to download ?H? components - " +
                              "continuing")
                        continue

                    try:
                        print("* -> Downloading Pressure data...")
                        stp = client.get_waveforms(
                            network=sta.network, station=sta.station,
                            location=sta.location[0], channel='?DH',
                            starttime=t1, endtime=t2, attach_response=True)
                        print(" ...done")
                        if len(stp) > 1:
                            print("WARNING: There are more than one ?DH trace")
                            print("* -> Keeping the highest sampling rate")
                            if stp[0].stats.sampling_rate > \
                                    stp[1].stats.sampling_rate:
                                stp = Stream(traces=stp[0])
                            else:
                                stp = Stream(traces=stp[1])
                    except Exception:
                        print(" Error: Unable to download ?DH component - " +
                              "continuing")
                        continue

                    st = sth + stp

                # Detrend, filter
                st.detrend('demean')
                st.detrend('linear')
                st.filter('lowpass', freq=0.5*args.new_sampling_rate,
                          corners=2, zerophase=True)
                st.resample(args.new_sampling_rate)

                # Check streams
                is_ok, st = utils.QC_streams(t1, t2, st)
                if not is_ok:
                    continue

                sth = st.select(component='1') + st.select(component='2') + \
                    st.select(component='Z')

                # Remove responses
                print("* -> Removing responses - Seismic data")
                sth.remove_response(pre_filt=args.pre_filt,
                                    output=args.units)

                # Extract traces - Z
                trZ = sth.select(component='Z')[0]
                trZ = utils.update_stats(
                    trZ, sta.latitude, sta.longitude, sta.elevation, 'Z')
                trZ.write(str(fileZ), format='SAC')

                # Extract traces - H
                if "H" in args.channels:
                    tr1 = sth.select(component='1')[0]
                    tr2 = sth.select(component='2')[0]
                    tr1 = utils.update_stats(
                        tr1, sta.latitude, sta.longitude, sta.elevation, '1')
                    tr2 = utils.update_stats(
                        tr2, sta.latitude, sta.longitude, sta.elevation, '2')
                    tr1.write(str(file1), format='SAC')
                    tr2.write(str(file2), format='SAC')

                # Extract traces - P
                if "P" in args.channels:
                    stp = st.select(component='H')
                    print("* -> Removing responses - Pressure data")
                    stp.remove_response(pre_filt=args.pre_filt)
                    trP = stp[0]
                    trP = utils.update_stats(
                        trP, sta.latitude, sta.longitude, sta.elevation, 'P')
                    trP.write(str(fileP), format='SAC')
                else:
                    stp = Stream()

                # Write out pickled EventStream object
                eventstream = EventStream(
                    sta, sth, stp, tstamp, lat, lon, time, window,
                    args.new_sampling_rate, ncomp)
                eventstream.save(filename)