def Splitwave_CrossC(st_cut, plot=False):
    """Measure splitting with the cross-correlation method and save the result."""
    tmp = st_cut
    north = tmp[1].data
    east = tmp[0].data
    sample_interval = tmp[0].stats.delta
    realdata = sw.Pair(north, east, delta=sample_interval)
    # Analysis window in seconds relative to the trace start.
    # TODO: derive this per event rather than hard-coding it.
    t1 = 10
    t2 = 70
    realdata.set_window(t1, t2)
    # realdata.plot()
    m = sw.CrossM(realdata, lags=(2, ))
    try:
        path_Methods = '{0}/../SplitWave_Results/Methods/CrossC/{1}/'.format(
            save_loc, st_cut[0].stats.station)
        os.mkdir(path_Methods)
    except OSError:
        pass  # directory already exists
    m.save('{0}/../SplitWave_Results/Methods/CrossC/{1}/{1}_{2}.eig'.format(
        save_loc, st_cut[0].stats.station,
        st_cut[0].stats.starttime.strftime("%Y-%m-%d")))
    if plot:
        m.plot()
    return m.fast, m.dfast, round(m.lag, 4), round(m.dlag, 4)

def process_st(st, tt, trim=120):
    """
    Filter, trim (to a window of +/- `trim` seconds around the predicted arrival)
    and interactively window the traces; the Pair object is created here.
    Traces are bandpass filtered between 2 s and 100 s period (0.5 Hz and 0.01 Hz).
    """
    st.filter("bandpass", freqmin=0.01, freqmax=0.5, corners=2, zerophase=True)
    # Predicted phase arrival as a UTCDateTime object so it can be the centre of the trim.
    tt_UTC = st[0].stats.starttime + tt
    st.trim(tt_UTC - trim, tt_UTC + trim)
    pair = sw.Pair(st[1].data, st[0].data, delta=st[0].stats.delta)
    pair.plot(pick=True, marker=trim)
    w1, w2 = pair.wbeg(), pair.wend()
    # Make the picked window start/end relative to the origin time of the event.
    wbeg_origin_time, wend_origin_time = (tt - trim + w1), (tt - trim + w2)
    return pair, [wbeg_origin_time, wend_origin_time]

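# --- Hedged usage sketch (not part of the original module) ---
# process_st() takes `tt`, a predicted phase travel time in seconds relative to the
# trace start (assumed here to coincide with the event origin time). One way to
# obtain such a prediction with ObsPy's TauPyModel, mirroring the call made in
# read_data() below; the function name and its arguments are hypothetical.
def predict_sks_traveltime(evdp_km, evla, evlo, stla, stlo):
    """Predicted SKS travel time (s) for a given source depth and geometry (assumed helper)."""
    from obspy.taup import TauPyModel
    model = TauPyModel('iasp91')
    arrivals = model.get_travel_times_geo(evdp_km, evla, evlo, stla, stlo,
                                          phase_list=['SKS'])
    return arrivals[0].time
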
def Splitwave_EigenM(st_cut, plot=False):
    """Measure splitting with the eigenvalue minimisation method."""
    # Get the data into a Pair object.
    tmp = st_cut
    north = tmp[1].data
    east = tmp[0].data
    sample_interval = tmp[0].stats.delta
    realdata = sw.Pair(north, east, delta=sample_interval)
    # Analysis window in seconds relative to the trace start.
    # TODO: derive this per event rather than hard-coding it.
    t1 = 10
    t2 = 50
    realdata.set_window(t1, t2)
    # realdata.plot()
    measure = sw.EigenM(realdata)
    if plot:
        measure.plot()
    return measure.fast, measure.dfast, round(measure.lag, 4), round(measure.dlag, 4)

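# --- Hedged usage sketch (not part of the original module) ---
# Both estimators take a two-component ObsPy Stream cut around the phase of
# interest, with east at index 0 and north at index 1. Splitwave_CrossC() also
# relies on a module-level `save_loc` output path being defined. This helper is
# hypothetical and only illustrates the calling convention.
def compare_splitting_estimates(st_cut):
    """Run the eigenvalue and cross-correlation estimators on the same cut stream."""
    fast_e, dfast_e, lag_e, dlag_e = Splitwave_EigenM(st_cut, plot=False)
    fast_c, dfast_c, lag_c, dlag_c = Splitwave_CrossC(st_cut, plot=False)
    print('EigenM: fast = {} +/- {} deg, lag = {} +/- {} s'.format(fast_e, dfast_e, lag_e, dlag_e))
    print('CrossC: fast = {} +/- {} deg, lag = {} +/- {} s'.format(fast_c, dfast_c, lag_c, dlag_c))
    return (fast_e, dfast_e, lag_e, dlag_e), (fast_c, dfast_c, lag_c, dlag_c)
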
def st_prep(st, f_min, f_max):
    """
    Prepare a Stream for splitting analysis (bandpass filter and trim)
    and then create the Pair object.
    """
    if st[0].data.size != st[1].data.size:
        # Streams are not the same length: trim the longer one from its end.
        len_diff = abs(st[0].data.size - st[1].data.size)  # length difference in samples
        if st[0].data.size > st[1].data.size:
            st[0].trim(0, st[1].stats.endtime - (len_diff * st[0].stats.delta))
            print('East component is {} points longer than North component, trimming'.format(len_diff))
        elif st[1].data.size > st[0].data.size:
            st[1].trim(0, st[1].stats.endtime - (len_diff * st[1].stats.delta))
            print('North component is {} points longer than East component, trimming'.format(len_diff))
    # print(st[0].data.size, st[1].data.size)
    if st[0].data.size % 2 == 0 or st[1].data.size % 2 == 0:
        # Even number of points: trim 3 samples off the end.
        # print("Even number of points, trimming by 3")
        st = st.trim(0, st[0].stats.endtime - 3 * st[0].stats.delta)
        # print(st[0].data.size, st[1].data.size)
    # Zero-phase bandpass filter of the streams.
    st.filter("bandpass", freqmin=f_min, freqmax=f_max, corners=2, zerophase=True)
    pair = sw.Pair(st[1].data, st[0].data, delta=st[0].stats.delta)
    return pair

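# --- Hedged usage sketch (not part of the original module) ---
# A possible end-to-end driver for st_prep(): read a two-component record (east
# first, north second, as st_prep expects), build the Pair, set a window in
# seconds relative to the trace start, and measure with the eigenvalue method.
# The function name, file arguments and default window are hypothetical.
def measure_from_files(east_file, north_file, f_min=0.01, f_max=0.5, t1=10, t2=50):
    """Read, prepare with st_prep(), window, and run sw.EigenM (assumed driver)."""
    from obspy import read
    st = read(east_file) + read(north_file)
    pair = st_prep(st, f_min, f_max)
    pair.set_window(t1, t2)
    measure = sw.EigenM(pair)
    return measure.fast, measure.dfast, measure.lag, measure.dlag
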
def read_data(path):
    """
    Loop over the SAC files in `path` in groups of three components, measure SKS
    splitting with EigenM, TransM and CrossM over a range of filters and analysis
    windows, and write the results and figures per event.
    """
    try:
        dfile = os.listdir(path)
        loop = len(dfile)
        inc = 0
    except OSError:
        raise Exception("file not found")
    while (inc != loop - 1 and inc != loop - 2 and inc != loop - 3):
        s1, s2, s3 = '', '', ''
        refile = ''
        # Collect the next three SAC file names (one per component).
        while True:
            try:
                ext = dfile[inc][-3:]
                if ext == 'sac' or ext == 'SAC':
                    s1 += dfile[inc]
                    refile += dfile[inc]
                    inc += 1
                    # print(s1)
                    break
                else:
                    inc += 1
            except IndexError:
                inc += 1
        while True:
            try:
                ext = dfile[inc][-3:]
                if ext == 'sac' or ext == 'SAC':
                    s2 += dfile[inc]
                    inc += 1
                    # print(s2)
                    break
                else:
                    inc += 1
            except IndexError:
                inc += 1
        while True:
            try:
                ext = dfile[inc][-3:]
                if ext == 'sac' or ext == 'SAC':
                    s3 += dfile[inc]
                    inc += 1
                    # print(s3)
                    break
                else:
                    inc += 1
            except IndexError:
                inc += 1

        st = (read(path + '/' + s1, debug_headers=True) +
              read(path + '/' + s2, debug_headers=True) +
              read(path + '/' + s3, debug_headers=True))
        # e.g. 2011.052.10.57.52.4000.XX.KTL.00.BHE.M.sac, ...BHN.M.sac, ...BHZ.M.sac

        # Extract the required information from the SAC headers.
        tr = st[0]
        # print(tr.stats)
        # st.plot()
        evtime = tr.stats['starttime']
        endtime = tr.stats['endtime']
        tr = dict(tr.stats['sac'])
        b = tr['b']
        evla = tr['evla']
        evlo = tr['evlo']
        stla = tr['stla']
        stlo = tr['stlo']
        evdp = tr['evdp']
        model = TauPyModel('iasp91')
        arrivals = model.get_travel_times_geo(evdp, evla, evlo, stla, stlo,
                                              phase_list=['SKS'])
        skstime = evtime + arrivals[0].time - b
        # print(skstime)

        dist, az, baz = geodetics.base.gps2dist_azimuth(evla, evlo, stla, stlo)
        figurefile = path + '/' + refile[0:30]
        resultfile = path + '/' + refile[0:30] + '_results.txt'
        f = open(resultfile, 'w')
        f.write(' EventId' + '\t ' + 'Baz' + '\t\t' + ' filter' + '\t\t' + 'SI' + '\t' +
                'Split/Null' + '\t\t\t' + 'EigenM' + '\t\t\t' + ' TransM' + '\t\t\t' +
                ' CrossM' + '\t\t\t\t' + '\n')
        f.write('2011.052.10.57.52 ' + str(round(baz, 2)) + '\t' + 'f1' + '\t' + 'f2' +
                '\t' + '\t' + '\t\t' +
                '|' + 'phi' + '\t' + 'dev' + '\t' + 't' + '\t' + 'dt' + '\t' +
                '|' + 'phi' + '\t' + 'dev' + '\t' + 't' + '\t' + 'dt' + '\t' +
                '|' + 'phi' + '\t' + 'dev' + '\t' + 't' + '\t' + 'dt' + '\t' + '\n')
        f.close()

        # Apply each bandpass filter in turn.
        for j in range(len(f1)):
            st.filter("bandpass", freqmin=f1[j], freqmax=f2[j])
            # st.plot()
            # Trim around the predicted SKS arrival.
            st.trim(skstime - minsks, skstime + maxsks)
            # st.plot()
            # Create the Pair.
            north = st[1].data
            east = st[0].data
            sample_interval = st[0].stats.delta
            # print(sample_interval)
            realdata = sw.Pair(north, east, delta=sample_interval)
            si = realdata.splitting_intensity()
            x, y = realdata.cordinatewindow()
            diff = int(y - x) - windowsize
            # realdata.plot()
            try:
                # Initial measurements over the full window (tagged with index -1).
                measure = sw.EigenM(realdata)
                temp = measure.measurements()
                print(-1, temp)
                temp = list(temp)
                temp.append(-1)
                m = [temp]
                measure1 = sw.TransM(realdata, pol=baz)
                temp = measure1.measurements()
                print(-1, temp)
                temp = list(temp)
                temp.append(-1)
                m1 = [temp]
                measure2 = sw.CrossM(realdata)
                temp = measure2.measurements()
                print(-1, temp)
                temp = list(temp)
                temp.append(-1)
                m2 = [temp]
            except Exception:
                print("please check this data manually, cannot apply filter", j + 1)
                print("for file names")
                print(s1, s2, s3)
                continue

            # Scan the analysis window across the trimmed trace.
            for i in range(diff):
                a = realdata
                # a.plot()
                try:
                    a.set_window(x + i, x + windowsize + i)
                    # a.plot()
                    try:
                        # EigenM
                        measure = sw.EigenM(a)
                        temp = measure.measurements()
                        print(i, temp)
                        temp = list(temp)
                        temp.append(i)
                        m.append(temp)
                        # TransM
                        measure1 = sw.TransM(realdata, pol=baz)
                        temp = measure1.measurements()
                        print(i, temp)
                        temp = list(temp)
                        temp.append(i)
                        m1.append(temp)
                        # CrossM
                        measure2 = sw.CrossM(a)
                        temp = measure2.measurements()
                        print(i, temp)
                        temp = list(temp)
                        temp.append(i)
                        m2.append(temp)
                    except Exception:
                        continue
                except Exception:
                    continue

            # Pick the best-constrained EigenM window and save its plot.
            try:
                index, ti = bestvalue(m)
                print(index)
                phi, dev, t, dt = m[ti][0], m[ti][1], m[ti][2], m[ti][3]
                a = realdata
                if index == -1:
                    a.set_window(x, y)
                else:
                    a.set_window(x + index, x + windowsize + index)
                # a.plot()
                measure = sw.EigenM(a)
                # measure.plot()
                fname = figurefile + '_EigenM_' + str(j) + '.pdf'
                # To save the plot pass 'save' and a file name;
                # to save and show it pass 'showandsave' and a file name;
                # to just show it pass nothing.
                measure.plot('save', fname)
            except Exception:
                print("not saved")
                continue

            # Same for TransM.
            try:
                index, ti = bestvalue(m1)
                print(index)
                phi1, dev1, t1, dt1 = m1[ti][0], m1[ti][1], m1[ti][2], m1[ti][3]
                a = realdata
                if index == -1:
                    a.set_window(x, y)
                else:
                    a.set_window(x + index, x + windowsize + index)
                # a.plot()
                measure1 = sw.TransM(a, pol=baz)
                # measure1.plot()
                fname = figurefile + '_TransM_' + str(j) + '.pdf'
                measure1.plot('save', fname)
            except Exception:
                continue

            # Same for CrossM.
            try:
                index, ti = bestvalue(m2)
                print(index)
                phi2, dev2, t2, dt2 = m2[ti][0], m2[ti][1], m2[ti][2], m2[ti][3]
                a = realdata
                if index == -1:
                    a.set_window(x, y)
                else:
                    a.set_window(x + index, x + windowsize + index)
                # a.plot()
                measure2 = sw.CrossM(a)
                # measure2.plot()
                fname = figurefile + '_CrossM_' + str(j) + '.pdf'
                measure2.plot('save', fname)
            except Exception:
                continue

            # Append one results row per filter band.
            try:
                f = open(resultfile, 'a')
                f.write('\t' + '\t\t\t' + str(f1[j]) + '\t' + str(f2[j]) + '\t' +
                        str(round(si, 2)) + '\t\t\t' +
                        '|' + str(round(phi, 3)) + '\t' + str(round(dev, 3)) + '\t' +
                        str(round(t, 3)) + '\t' + str(round(dt, 3)) + '\t' +
                        '|' + str(round(phi1, 3)) + '\t' + str(round(dev1, 3)) + '\t' +
                        str(round(t1, 3)) + '\t' + str(round(dt1, 3)) + '\t' +
                        '|' + str(round(phi2, 3)) + '\t' + str(round(dev2, 3)) + '\t' +
                        str(round(t2, 3)) + '\t' + str(round(dt2, 3)) + '\t' + '\n')
                f.close()
            except Exception:
                print("cannot write")
                continue

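# --- Hedged configuration sketch (assumption, not from the original code) ---
# read_data() references several module-level names that must be defined elsewhere:
# f1/f2 (parallel lists of bandpass corner frequencies in Hz), minsks/maxsks
# (seconds cut before/after the predicted SKS time), windowsize (analysis window
# length in the same units as the Pair window coordinates returned by
# cordinatewindow()), and a bestvalue() helper that selects the preferred
# measurement. The values below are hypothetical placeholders only.
# f1 = [0.02, 0.04]          # lower corner frequencies (Hz)
# f2 = [0.2, 0.3]            # upper corner frequencies (Hz)
# minsks, maxsks = 60, 60    # trim window around the predicted SKS arrival (s)
# windowsize = 40            # scanned analysis window length
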
def SKScalc(self, dataSKSfileloc, trace_loc_ENZ=None, trace_loc_RTZ=None,
            trigger_loc=None, method='None'):
    # self.logger.info("Cut the traces around the SKS arrival")
    sksfiles = glob.glob(
        dataSKSfileloc + f"*-{str(inpSKSdict['filenames']['data_sks_suffix'])}.h5")
    # self.logger.info(sksfiles)
    all_meas_start, all_meas_close = True, False
    meas_file = self.plot_measure_loc + 'done_measurements.txt'
    # Track which event/station measurements are already done.
    f, finished_file, finished_events = measure_status(meas_file)
    for i, sksfile in enumerate(sksfiles):
        count = 0
        data = read_rf(sksfile, 'H5')
        self.logger.info(f"SKS measurements for {sksfile}\n")
        net_name = os.path.basename(sksfile).split("-")[0]
        stn_name = os.path.basename(sksfile).split("-")[1]
        stn_meas_close = False
        sks_measurements_stn = self.plot_measure_loc + \
            f"{net_name}_{stn_name}_{str(inpSKSdict['filenames']['sks_meas_indiv'])}"
        null_measurements_stn = self.plot_measure_loc + \
            f"{net_name}_{stn_name}_null_measurements.txt"
        if not os.path.exists(sks_measurements_stn):
            sks_meas_file = sks_measure_file_start(
                sks_measurements_stn,
                data[0].stats.station_longitude,
                data[0].stats.station_latitude,
                "EventTime EvLong EvLat Evdp Baz FastDirection(degs) deltaFastDir(degs) "
                "LagTime(s) deltaLagTime(s) SI\n")
            sks_meas_file_null = sks_measure_file_start(
                null_measurements_stn,
                data[0].stats.station_longitude,
                data[0].stats.station_latitude,
                "EventTime EvLong EvLat Evdp Baz\n")
            stn_meas_close = True
        plt_id = f"{net_name}-{stn_name}"
        measure_list, squashfast_list, squashlag_list = [], [], []
        fast_dir_all, lag_time_all = [], []
        num_measurements, num_null = 0, 0
        for stream3c in IterMultipleComponents(data, 'onset', 3):
            count += 1
            ## Check that the lengths of all three traces are equal.
            tr_lens = []
            for tr in stream3c:
                lentr = tr.stats.npts
                tr_lens.append(lentr)
                lengt = tr.stats.sampling_rate * 100
                if lentr != lengt:
                    continue
            if not len(set(tr_lens)) == 1:
                continue
            if sksfile in finished_file and \
                    str(stream3c[0].stats.event_time) in finished_events:
                continue
            else:
                if all_meas_start:
                    all_measurements = open(
                        self.plot_measure_loc + "../" + "sks_measurements_all.txt", 'w')
                    all_measurements.write(
                        "NET STA LON LAT AvgFastDir AvgLagTime NumMeasurements NumNull\n")
                    all_meas_start = False
                    all_meas_close = True
                f.write("{},{}\n".format(sksfile, stream3c[0].stats.event_time))
                ## Check that the lengths of all three traces are equal.
                len_tr_list = [len(tr) for tr in stream3c]
                if len(set(len_tr_list)) != 1:
                    self.logger.warning(
                        f"{count}/{int(len(data)/3)} [{i}/{len(sksfiles)}] "
                        f"Bad trace: {stream3c[0].stats.event_time}")
                    continue
                ## Filter the traces.
                st = stream3c.filter(
                    'bandpass',
                    freqmin=float(inpSKSdict['sks_filter_settings']['minfreq']),
                    freqmax=float(inpSKSdict['sks_filter_settings']['maxfreq']))
                st.detrend('linear')
                # st.taper(max_percentage=0.05, type="hann")
                sps = st[0].stats.sampling_rate
                t = st[0].stats.starttime
                ## Trim the traces.
                trace1 = st.trim(
                    t + int(inpSKSdict['sks_picking']['trimstart']),
                    t + int(inpSKSdict['sks_picking']['trimend']))
                ## Plot the ENZ components.
                if trace_loc_ENZ:
                    plot_trace(trace1, trace_loc_ENZ)
                ## Rotate to RTZ: trace1[0]->BHT, trace1[1]->BHR, trace1[2]->BHZ.
                trace1.rotate('NE->RT')
                evyear = trace1[0].stats.event_time.year
                evmonth = trace1[0].stats.event_time.month
                evday = trace1[0].stats.event_time.day
                evhour = trace1[0].stats.event_time.hour
                evminute = trace1[0].stats.event_time.minute
                ## Plot all three traces in RTZ.
                if trace_loc_RTZ:
                    plot_trace(trace1, trace_loc_RTZ)

                ######################
                # Different picker methods, operating on the transverse component.
                ######################
                threshold = (
                    float(inpSKSdict['sks_picking']['picking_algo']['sks_picking_algo_thr0']),
                    float(inpSKSdict['sks_picking']['picking_algo']['sks_picking_algo_thr1']))
                if method == "recursive_sta_lta":
                    # self.logger.info(f"Method is {method}")
                    cft = recursive_sta_lta(trace1[1].data, int(1 * sps), int(5 * sps))
                    # typical thresholds: (2.5, 0.65)
                    on_off = np.array(trigger_onset(cft, threshold[0], threshold[1]))
                    if trigger_loc and on_off.shape[0] == 1:
                        outfile = trigger_loc + f'{plt_id}-{trace1[0].stats.event_time}-trigger.png'
                        plot_trigger(trace1[1], cft, on_off, threshold[0], threshold[1],
                                     outfile=outfile)
                elif method == "classic_sta_lta":
                    cft = classic_sta_lta(trace1[1].data, int(5 * sps), int(10 * sps))
                    # typical thresholds: (1.5, 0.5)
                    on_off = np.array(trigger_onset(cft, threshold[0], threshold[1]))
                elif method == "z_detect":
                    cft = z_detect(trace1[1].data, int(10 * sps))
                    # typical thresholds: (-0.4, -0.3)
                    on_off = np.array(trigger_onset(cft, threshold[0], threshold[1]))
                elif method == "carl_sta_trig":
                    cft = carl_sta_trig(trace1[1].data, int(5 * sps), int(10 * sps), 0.8, 0.8)
                    # typical thresholds: (20.0, -20.0)
                    on_off = np.array(trigger_onset(cft, threshold[0], threshold[1]))
                elif method == "delayed_sta_lta":
                    cft = delayed_sta_lta(trace1[1].data, int(5 * sps), int(10 * sps))
                    # typical thresholds: (5, 10)
                    on_off = np.array(trigger_onset(cft, threshold[0], threshold[1]))
                else:
                    self.logger.info("No valid method specified")
                    on_off = np.array([])  # no pick; the splitting measurement below is skipped

                if on_off.shape[0] == 1:
                    trace1.rotate('RT->NE')
                    trace2 = trace1
                    # Create a Pair from the two horizontal traces; delta is the sample interval.
                    realdata = sw.Pair(trace2[1].data, trace2[0].data, delta=1 / sps)
                    try:
                        measure = sw.EigenM(
                            realdata,
                            lags=(float(inpSKSdict['sks_measurement_contrains']['lag_settings']['minlag']),
                                  float(inpSKSdict['sks_measurement_contrains']['lag_settings']['maxlag']),
                                  40))
                    except Exception as e:
                        self.logger.error(e)
                        continue
                    d = measure.srcpoldata_corr().chop()
                    # Restivo and Helffrich (1999) signal-to-noise ratio.
                    snr = sw.core.snrRH(d.x, d.y)
                    # print(d.x, d.y)
                    # print("splitting intensity", splitting_intensity(d))
                    ## Sum the error surface along each axis to "squash" it into two profiles,
                    ## one for the fast direction and one for the lag. The result is better
                    ## defined for the lam1/lam2 surface than for the lam1 or lam2 surface
                    ## alone - Jack Walpole.
                    squashfast = np.sum(measure.lam1 / measure.lam2, axis=0)
                    squashlag = np.sum(measure.lam1 / measure.lam2, axis=1)
                    mean_max_lam12_fast = np.max(squashfast) / np.mean(squashfast)
                    mean_max_lam12_lag = np.max(squashlag) / np.mean(squashlag)
                    ## Null test: measurements that fail the constraints on the maximum allowed
                    ## delay-time error and the maximum delay time can be associated with null
                    ## measurements, because there is too little energy on the transverse
                    ## component to constrain the delay time (Evans et al., 2006).
                    diff_mult = auto_null_measure(measure, squashfast, squashlag,
                                                  plot_null=False)
                    null_thresh = 0.05  # below this value the measurement is classified as null
                    if diff_mult < null_thresh:
                        if stn_meas_close:
                            sks_meas_file_null.write(
                                "{} {:8.4f} {:8.4f} {:4.1f} {:6.1f}\n".format(
                                    trace1[0].stats.event_time,
                                    trace1[0].stats.event_longitude,
                                    trace1[0].stats.event_latitude,
                                    trace1[0].stats.event_depth,
                                    trace1[0].stats.back_azimuth))
                        self.logger.info("{}/{} Null measurement {}".format(
                            count, int(len(data) / 3), trace1[0].stats.event_time))
                        num_null += 1
                    else:
                        if str(inpSKSdict['sks_measurement_contrains']['sel_param']) == "snr":
                            filtres = filter_pick_snr(measure, inpSKSdict, snr)
                        elif str(inpSKSdict['sks_measurement_contrains']['sel_param']) == "lam12":
                            filtres = filter_pick_lam12(measure, inpSKSdict,
                                                        mean_max_lam12_fast,
                                                        mean_max_lam12_lag)
                        if filtres:
                            num_measurements += 1
                            if stn_meas_close:
                                sks_meas_file.write(
                                    "{} {:8.4f} {:8.4f} {:4.1f} {:6.1f} {:6.1f} {:.1f} {:.1f} {:.2f} {:.2f}\n".format(
                                        trace1[0].stats.event_time,
                                        trace1[0].stats.event_longitude,
                                        trace1[0].stats.event_latitude,
                                        trace1[0].stats.event_depth,
                                        trace1[0].stats.back_azimuth,
                                        measure.fast, measure.dfast,
                                        measure.lag, measure.dlag,
                                        splitting_intensity(d)))
                            if self.plot_measure_loc and bool(
                                    inpSKSdict['sks_measurement_plot']['measurement_snapshot']):
                                plot_SKS_measure(measure)
                                plt.savefig(
                                    self.plot_measure_loc +
                                    f'{plt_id}-{evyear}_{evmonth}_{evday}_{evhour}_{evminute}.png')
                                plt.close('all')
                            self.logger.info(
                                "{}/{} [{}/{}] Good measurement: {}; "
                                "fast = {:.2f}+-{:.2f}, lag = {:.2f}+-{:.2f}".format(
                                    count, int(len(data) / 3), i, len(sksfiles),
                                    trace1[0].stats.event_time,
                                    measure.fast, measure.dfast,
                                    measure.lag, measure.dlag))
                            if int(inpSKSdict['error_plot_toggles']['error_plot_indiv']):
                                errorplot(
                                    measure, squashfast, squashlag,
                                    figname=self.plot_measure_loc +
                                    f'errorplot_{plt_id}-{evyear}_{evmonth}_{evday}_{evhour}_{evminute}.png')
                                polar_error_surface(
                                    measure,
                                    figname=self.plot_measure_loc +
                                    f'errorplot_polar_{plt_id}-{evyear}_{evmonth}_{evday}_{evhour}_{evminute}.png')
                            if int(inpSKSdict['error_plot_toggles']['error_plot_all']):
                                measure_list.append(measure)
                                squashfast_list.append(squashfast)
                                squashlag_list.append(squashlag)
                                fast_dir = measure.degs[0, np.argmax(squashfast)]
                                # Make sure the measurements are on the same half of the projection.
                                if fast_dir < -45 and fast_dir > -91:
                                    fast_dir = fast_dir + 180
                                fast_dir_all.append(fast_dir)
                                lag_time_all.append(measure.lags[np.argmax(squashlag), 0])
                        else:
                            # Consider changing the trim window.
                            self.logger.info(
                                "{}/{} [{}/{}] Bad measurement: {}! "
                                "dfast = {:.1f}, dlag = {:.1f}, snr: {:.1f}".format(
                                    count, int(len(data) / 3), i, len(sksfiles),
                                    stream3c[0].stats.event_time,
                                    measure.dfast, measure.dlag, snr))
                else:
                    self.logger.info(
                        f"{count}/{int(len(data)/3)} [{i}/{len(sksfiles)}] "
                        f"Bad phase pick: {stream3c[0].stats.event_time}")
        if stn_meas_close:
            sks_meas_file.close()
            sks_meas_file_null.close()
        if bool(inpSKSdict['error_plot_toggles']['error_plot_all']) and count > 0:
            errorplot_all(measure_list, squashfast_list, squashlag_list,
                          np.array(fast_dir_all), np.array(lag_time_all),
                          figname=self.plot_measure_loc + f'errorplot_{plt_id}.png')
        ## Splitting intensity vs backazimuth.
        if bool(inpSKSdict['sks_measurement_plot']['plot_SI']):
            sks_meas_file = self.plot_measure_loc + \
                f"{net_name}_{stn_name}_{str(inpSKSdict['filenames']['sks_meas_indiv'])}"
            outfig = self.plot_measure_loc + f"{net_name}_{stn_name}_BAZ_SI.png"
            if os.path.exists(sks_meas_file) and not os.path.exists(outfig):
                plot_baz_si_map(sks_meas_file=sks_meas_file, outfig=outfig)
        if all_meas_close:
            mean_fast_dir_all = mean_angle(fast_dir_all) if len(fast_dir_all) else 0
            all_measurements.write(
                "{} {} {:.4f} {:.4f} {:.2f} {:.1f} {} {}\n".format(
                    net_name, stn_name,
                    data[0].stats.station_longitude,
                    data[0].stats.station_latitude,
                    mean_fast_dir_all, np.mean(lag_time_all),
                    num_measurements, num_null))
    f.close()
    if all_meas_close:
        all_measurements.close()

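# --- Hedged sketch (assumption, not part of the original class) ---
# The SKS window in SKScalc() is accepted only when the STA/LTA trigger fires
# exactly once on the transverse component. Isolated, the recursive STA/LTA pick
# looks like this; the function name, window lengths and thresholds are
# hypothetical placeholders (the source comments suggest values around 2.5/0.65
# for the recursive picker).
def pick_transverse_onsets(tr, sta_sec=1.0, lta_sec=5.0, thr_on=2.5, thr_off=0.65):
    """Return an (n_triggers, 2) array of on/off sample indices for an ObsPy Trace."""
    import numpy as np
    from obspy.signal.trigger import recursive_sta_lta, trigger_onset
    sps = tr.stats.sampling_rate
    cft = recursive_sta_lta(tr.data, int(sta_sec * sps), int(lta_sec * sps))
    return np.array(trigger_onset(cft, thr_on, thr_off))
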