# add the inventory for all components + all time of this tation if (not ds.waveforms.list()): ds.add_stationxml(sta_inv) try: # get data t0 = time.time() tr = client.get_waveforms(network=net[ista],station=sta[ista],\ channel=chan[ista],location=location[ista],starttime=s1,endtime=s2) t1 = time.time() except Exception as e: print(e) continue # preprocess to clean data tr = noise_module.preprocess_raw(tr, sta_inv, prepro_para, date_info) t2 = time.time() if len(tr): new_tags = '{0:s}_{1:s}'.format(chan[ista].lower(), location[ista].lower()) print(new_tags) ds.add_waveforms(tr, tag=new_tags) if flag: print(ds) print('downloading data %6.2f s; pre-process %6.2f s' % ((t1 - t0), (t2 - t1))) tt1 = time.time() print('downloading step takes %6.2f s' % (tt1 - tt0)) comm.barrier()
# --- fragment: interior of a per-chunk loop (enclosing loop not visible;
# --- `tttfiles`, `ick`, `all_chunk`, `prepro_para`, `locs`, `date_info`,
# --- `flag`, `DATADIR` are bound outside this span).
# gather all traces for this time chunk into one Stream
source = obspy.Stream()
for ifile in tttfiles:
    try:
        tr = obspy.read(ifile)
        for ttr in tr:
            source.append(ttr)
    except Exception as inst:
        # unreadable file: report and keep accumulating the rest
        print(inst);continue

# jump if no good data left
if not len(source):continue

# make inventory to save into ASDF file
t1=time.time()
inv1 = noise_module.stats2inv(source[0].stats,prepro_para,locs=locs)
tr = noise_module.preprocess_raw(source,inv1,prepro_para,date_info)
# all-zero first trace means the cleaning zeroed the data: skip this chunk
if np.all(tr[0].data==0):continue
t2 = time.time()
if flag:print('pre-processing takes %6.2fs'%(t2-t1))

# jump if no good data left
if not len(tr):continue

# ready for output: chunk file named "<start>T<end>.h5" from consecutive
# entries of all_chunk
ff=os.path.join(DATADIR,all_chunk[ick]+'T'+all_chunk[ick+1]+'.h5')
# create the file first (mode='w') if absent, then reopen for appending
if not os.path.isfile(ff):
    with pyasdf.ASDFDataSet(ff,mpi=False,compression="gzip-3",mode='w') as ds:
        pass
with pyasdf.ASDFDataSet(ff,mpi=False,compression="gzip-3",mode='a') as ds:
    # add the inventory for all components + all time of this station
    # (body of this `with` continues beyond the visible fragment)
# --- fragment: interior of a per-tag loop (enclosing loop not visible;
# --- `itag`, `all_tags`, `temp`, `ds`, `inv1`, `downsamp_freq`, `checkt`,
# --- `pre_filt`, `resp`, `resp_dir`, `prepro`, `flag` bound outside).
if flag:
    print("working on trace " + all_tags[itag])

# pull the tagged waveform for this station from the ASDF dataset
source = ds.waveforms[temp[0]][all_tags[itag]]
comp = source[0].stats.channel

if prepro:
    # tags are expected to start with 'raw' before pre-processing;
    # a different prefix suggests this trace was already processed —
    # warn (formerly a hard error, see commented line) but proceed anyway
    if all_tags[itag].split('_')[0] != 'raw':
        #raise ValueError('it appears pre-processing has been performed!')
        print( 'warning! it appears pre-processing has been performed!' )
    t0 = time.time()
    source = noise_module.preprocess_raw( source, inv1, downsamp_freq, checkt, pre_filt, resp, resp_dir)
    t1 = time.time()
    if flag:
        print("prepro takes %f s" % (t1 - t0))

#----------variables to define days with earthquakes----------
# quality gate: degenerate traces (zero or NaN MAD / std-dev) carry no
# usable signal — skip them
all_madS = noise_module.mad(source[0].data)
all_stdS = np.std(source[0].data)
if all_madS == 0 or all_stdS == 0 or np.isnan( all_madS) or np.isnan(all_stdS):
    print("continue! madS or stdS equeals to 0 for %s" % source)
    continue

# per-segment MAD values accumulated below (continues beyond fragment)
trace_madS = []
# --- fragment: interior of a per-channel download loop (enclosing loop not
# --- visible; `K`, `sta`, `chan`, `t1`, `t2`, `client`, `ds`, `tags`,
# --- `NewFreq`, `checkt`, `pre_filt`, `resp`, `respdir`, `flag` bound outside).
try:
    # get data for the window [t1, t2] on this network/station/channel
    t0 = time.time()
    tr = client.get_waveforms(network=K.code, station=sta.code, channel=chan.code, location='*', \
        starttime = t1, endtime=t2)
    # NOTE(review): t1 held the request start time above and is clobbered here
    # with a timing stamp (t2 is likewise reused below) — the window variables
    # are unusable after this point; confirm the enclosing loop re-derives them.
    t1 = time.time()
except Exception as e:
    # download failed: report and move to the next channel/window
    print(e)
    continue

if len(tr):
    # clean up data
    t2 = time.time()
    tr = noise_module.preprocess_raw( tr, NewFreq, checkt, pre_filt, resp, respdir)
    t3 = time.time()

    # only keep the one with good data after processing
    if len(tr) > 0:
        if len(tr) == 1:
            # single trace: tag encodes YYYY_MM_DD + lower-cased channel code
            new_tags = tags+'_{0:04d}_{1:02d}_{2:02d}_{3}'.format(tr[0].stats.starttime.year,\
                tr[0].stats.starttime.month,tr[0].stats.starttime.day,chan.code.lower())
            ds.add_waveforms(tr, tag=new_tags)
        else:
            # multiple traces (gappy data): tag and store each one separately
            for ii in range(len(tr)):
                new_tags = tags+'_{0:04d}_{1:02d}_{2:02d}_{3}'.format(tr[ii].stats.starttime.year,\
                    tr[ii].stats.starttime.month,tr[ii].stats.starttime.day,chan.code.lower())
                ds.add_waveforms(tr[ii], tag=new_tags)
    if flag:
        # (verbose body continues beyond the visible fragment)
# --- fragment: interior of a loop over accumulated streams (enclosing loop
# --- not visible; `bigsource`, `locs`, `downsamp_freq`, `data_h5` bound
# --- outside this span).
# nothing collected for this iteration: skip
if len(bigsource) == 0:
    continue
print(bigsource)

#---------make an inventory---------
inv1 = noise_module.stats2inv(bigsource[0].stats, locs=locs)

#------------Pre-Processing-----------
#source = obspy.Stream()
# merge gappy traces, interpolating across gaps (obspy Stream.merge method=1)
bigsource = bigsource.merge(method=1, fill_value='interpolate') #[0]
bigsource = noise_module.preprocess_raw(bigsource, downsamp_freq, clean_time=True, pre_filt=None, resp=None, respdir=None)
print(bigsource)

# write cleaned traces + inventory into the (FFT-stage) ASDF file
with pyasdf.ASDFDataSet(data_h5, mpi=False, compression=None) as fft_ds:
    fft_ds.add_stationxml(inv1)
    for ii in range(len(bigsource)):
        print((bigsource[ii]))
        # guard against scalar float entries — presumably an artifact of an
        # earlier merge/fill step; TODO confirm how these arise
        if isinstance(bigsource[ii], np.float32):
            continue
        fft_ds.add_waveforms(bigsource[ii], tag='waveforms')
        print(bigsource[ii].stats.starttime)