else: # get station information all_tags = [1] sta = tmps.split('/')[-1] #----loop through each stream---- for itag in range(len(all_tags)): if flag: print("working on station %s and trace %s" % (sta, all_tags[itag])) # read waveform data if input_fmt == 'asdf': source = ds.waveforms[tmps][all_tags[itag]] else: source = obspy.read(tmps) inv1 = noise_module.stats2inv(source[0].stats, fc_para, locs) sta, net, lon, lat, elv, loc = noise_module.sta_info_from_inv( inv1) # channel info comp = source[0].stats.channel if comp[-1] == 'U': comp.replace('U', 'Z') if len(source) == 0: continue # cut daily-long data into smaller segments (dataS always in 2D) trace_stdS, dataS_t, dataS = noise_module.cut_trace_make_statis( fc_para, source) # optimized version:3-4 times faster if not len(dataS): continue N = dataS.shape[0] # do normalization if needed
# Assemble every readable trace for this time chunk into one Stream;
# unreadable files are reported and skipped (deliberate best-effort read).
source = obspy.Stream()
for ifile in tttfiles:
    try:
        tr = obspy.read(ifile)
        for ttr in tr:
            source.append(ttr)
    except Exception as inst:
        print(inst);continue

# jump if no good data left
if not len(source):continue

# make inventory to save into ASDF file
t1=time.time()
inv1 = noise_module.stats2inv(source[0].stats,prepro_para,locs=locs)
# NOTE(review): this version of preprocess_raw takes (stream, inventory,
# params, date_info); other scripts in this project use a different
# signature — confirm the noise_module version matches.
tr = noise_module.preprocess_raw(source,inv1,prepro_para,date_info)
# an all-zero first trace means pre-processing yielded no usable data
if np.all(tr[0].data==0):continue
t2 = time.time()
if flag:print('pre-processing takes %6.2fs'%(t2-t1))

# jump if no good data left
if not len(tr):continue

# ready for output: one HDF5 file per chunk, named <start>T<end>.h5
ff=os.path.join(DATADIR,all_chunk[ick]+'T'+all_chunk[ick+1]+'.h5')
# create the file in 'w' mode if absent, then reopen in 'a' mode to append
if not os.path.isfile(ff):
    with pyasdf.ASDFDataSet(ff,mpi=False,compression="gzip-3",mode='w') as ds:
        pass
# NOTE(review): the body of this 'with' block continues beyond this view
with pyasdf.ASDFDataSet(ff,mpi=False,compression="gzip-3",mode='a') as ds:
#-------------------------------------------------------------- #---what if this station has several segments in that day------ #-------------------------------------------------------------- sacfile = os.path.basename(tfile) try: source = obspy.read(tfile) except Exception as inst: print(inst) continue comp = source[0].stats.channel if flag: print("working on sacfile %s" % sacfile) #---------make an inventory--------- inv1 = noise_module.stats2inv(source[0].stats) if prepro: t0 = time.time() source = noise_module.preprocess_raw( source, downsamp_freq, checkt, pre_filt, resp, resp_dir) if len(source) == 0: continue t1 = time.time() if flag: print("prepro takes %f s" % (t1 - t0)) #----------variables to define days with earthquakes---------- all_madS = noise_module.mad(source[0].data) all_stdS = np.std(source[0].data)
#-------------------------------------------------------------- #---what if this station has several segments in that day------ #-------------------------------------------------------------- sacfile = os.path.basename(tfile) try: source = obspy.read(tfile) except Exception as inst: print(inst) continue comp = source[0].stats.channel if flag: print("working on sacfile %s" % sacfile) #---------make an inventory--------- inv1 = noise_module.stats2inv(source[0].stats) if prepro: t0 = time.time() source = noise_module.preprocess_raw( source, inv1, downsamp_freq, checkt, pre_filt, resp, resp_dir) if len(source) == 0: continue t1 = time.time() if flag: print("prepro takes %f s" % (t1 - t0)) #----------variables to define days with earthquakes---------- all_madS = noise_module.mad(source[0].data) all_stdS = np.std(source[0].data)
# Read this SAC file and accumulate its first trace into bigsource;
# unreadable files are reported (by exception type only) and skipped.
try:
    source1 = obspy.read(tfile)
    bigsource.append(source1[0])
    print(source1)
except Exception as inst:
    print(type(inst))
    continue
if flag:
    print("working on sacfile %s" % sacfile)
# nothing readable accumulated: move on
if len(bigsource) == 0:
    continue
print(bigsource)

#---------make an inventory---------
inv1 = noise_module.stats2inv(bigsource[0].stats, locs=locs)

#------------Pre-Processing-----------
#source = obspy.Stream()
# merge accumulated segments into one trace, interpolating across gaps
bigsource = bigsource.merge(method=1, fill_value='interpolate') #[0]
# NOTE(review): inv1 is built above but not passed here (resp=None); confirm
# this preprocess_raw signature matches this noise_module version.
bigsource = noise_module.preprocess_raw(bigsource, downsamp_freq, clean_time=True, pre_filt=None, resp=None, respdir=None)
print(bigsource)

# NOTE(review): this call is truncated at the end of the visible source
with pyasdf.ASDFDataSet(data_h5, mpi=False,