Example #1
                    elif event_type == 0:
                        #Pick hpol
                        p2p_cut = numpy.all(p2p[:, 1::2] < 16, axis=1)
                        event_cut = numpy.where(numpy.logical_and(p2p_cut,numpy.logical_and(numpy.all(peak_cut[:,0::2],axis=1),numpy.all(~peak_cut[:,1::2],axis=1))))[0]
                        template_eventid = event_cut[0]
                        eventids = eventids[event_cut]
                        pol = 'hpol'
                    #eventids = eventids[numpy.logical_and(numpy.any(peak_cut,axis=1),rf_cut)]

                    tct.setTemplateToEvent(template_eventid)
                    choice_events = numpy.sort(numpy.random.choice(eventids,size=numpy.min((1000,len(eventids))),replace=False))
                    times, averaged_waveforms = tct.averageAlignedSignalsPerChannel( choice_events, align_method=0, template_eventid=None, plot=plot_aligned_wf,event_type=None)
                    

                    resampled_averaged_waveforms = numpy.zeros((8,len(tct.waveform_times_corr)))
                    resampled_averaged_waveforms_original_length = numpy.zeros((8,len(reader.t())))
                    for channel in range(8):
                        #Resampling averaged waveforms to be more compatible with cross correlation framework. 
                        resampled_averaged_waveforms[channel] = scipy.interpolate.interp1d(times,averaged_waveforms[channel],kind='cubic',bounds_error=False,fill_value=0)(tct.waveform_times_corr)
                        resampled_averaged_waveforms_original_length[channel] = scipy.interpolate.interp1d(times,averaged_waveforms[channel],kind='cubic',bounds_error=False,fill_value=0)(reader.t())

                    if plot_original_length_templates:
                        plt.figure()
                        for channel, wf in enumerate(resampled_averaged_waveforms_original_length):
                            plt.plot(reader.t(),wf,label='%i'%channel)
                        plt.xlabel('t (ns)')
                        plt.ylabel('adu (not digitized)')
                    if save_template:
                        numpy.savetxt('./template_77MHz_type%i.csv'%(event_type),resampled_averaged_waveforms_original_length, delimiter=",")
                    
                    #FFTs of resampled templates which will be used when performing cross correlation.
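Example #1 resamples the channel-averaged waveforms onto two different time bases with cubic interpolation. A minimal, self-contained sketch of that interp1d pattern, using a synthetic waveform and made-up time axes rather than BEACON data:

import numpy
import scipy.interpolate

# Original (coarse) time base and a synthetic averaged waveform.
times = numpy.arange(512) * 2.0  # ns
waveform = numpy.sin(2 * numpy.pi * 0.05 * times) * numpy.exp(-times / 200.0)

# Denser target time base, standing in for tct.waveform_times_corr / reader.t().
target_times = numpy.arange(1024) * 1.0  # ns

# Cubic interpolation; samples outside the original range are filled with 0,
# matching bounds_error=False, fill_value=0 in the example above.
resampled = scipy.interpolate.interp1d(times, waveform, kind='cubic',
                                       bounds_error=False, fill_value=0)(target_times)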
Example #2
                        for event_index in best_eventids_indices[0:5]:
                            plotEvent(reader, eventids[event_index],
                                      hpol_snrs[event_index])

                        interesting_events_indices = numpy.where(
                            numpy.logical_and(hpol_snrs < 6,
                                              hpol_snrs > 5.5))[0]

                        for event_index in interesting_events_indices[0:5]:
                            plotEvent(reader, eventids[event_index],
                                      hpol_snrs[event_index])

                    else:
                        eventid = choose_eventid
                        reader.setEntry(eventid)
                        t = reader.t()

                        max_powers = numpy.zeros(hpol_beam_delays.shape[0])

                        power_sum_step = 8
                        N_original = int(len(t))
                        N_new = int(
                            numpy.ceil(N_original / power_sum_step) *
                            power_sum_step)
                        padded_wf = numpy.zeros((4, N_new))
                        new_t = numpy.arange(N_new) * (t[1] - t[0])

                        for i in range(4):
                            padded_wf[i][0:N_original] = reader.wf(2 * i)

                        binned_8_indices_A = numpy.arange(N_new).reshape(
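The snippet above pads each waveform so its length is a multiple of power_sum_step and builds index bins of that width; it is cut off mid-statement here. A hedged sketch of the pad-and-reshape power-sum pattern it appears to be setting up; the binned power sum at the end is my assumption, not part of the original excerpt:

import numpy

power_sum_step = 8
wf = numpy.random.randn(1003)  # placeholder waveform, length not a multiple of 8
N_original = len(wf)
N_new = int(numpy.ceil(N_original / power_sum_step) * power_sum_step)  # 1008

padded_wf = numpy.zeros(N_new)
padded_wf[0:N_original] = wf

# Group consecutive sample indices into bins of width power_sum_step, then sum
# the power within each bin (the assumed use of binned_8_indices_A above).
binned_indices = numpy.arange(N_new).reshape((-1, power_sum_step))
power_per_bin = numpy.sum(padded_wf[binned_indices]**2, axis=1)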
Example #3
    resample_factor = 1

    #Filter settings
    crit_freq_low_pass_MHz = 75
    crit_freq_high_pass_MHz = 35
    filter_order = 6
    plot_filter = True
    power_sum_cut_location = 50  #index
    power_sum_cut_value = 13000  #Events with a larger power sum than this are ignored.
    peak_cut = 60  #At least one channel has to have a signal cross this threshold.

    for run_index, run in enumerate(runs):
        eventids = known_pulser_ids['run%i' % run]
        reader = Reader(datapath, run)
        waveform_times = reader.t()
        waveforms_upsampled = {}
        waveforms_raw = {}

        #Prepare filter
        reader.setEntry(98958)
        wf = reader.wf(0)
        wf, waveform_times = scipy.signal.resample(wf,
                                                   len(wf) * resample_factor,
                                                   t=reader.t())
        dt = waveform_times[1] - waveform_times[0]
        filter_y, freqs = makeFilter(waveform_times,
                                     crit_freq_low_pass_MHz,
                                     crit_freq_high_pass_MHz,
                                     filter_order,
                                     plot_filter=plot_filter)
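makeFilter is a helper from this repository whose implementation is not shown here. As a rough, hypothetical stand-in, a comparable band-pass frequency response with the same corner frequencies could be built directly with scipy.signal; this sketch assumes a 2 ns sample spacing and a 512-sample waveform, which are not taken from the example:

import numpy
import scipy.signal

crit_freq_low_pass_MHz = 75
crit_freq_high_pass_MHz = 35
filter_order = 6

dt_ns = 2.0                   # assumed sample spacing
fs_Hz = 1.0 / (dt_ns * 1e-9)  # sampling rate

# Evaluate separate low-pass and high-pass Butterworth responses on the rfft
# frequency grid, then combine them into a single band-pass response.
freqs = numpy.fft.rfftfreq(512, d=dt_ns * 1e-9)
b_lp, a_lp = scipy.signal.butter(filter_order, crit_freq_low_pass_MHz * 1e6, btype='low', fs=fs_Hz)
b_hp, a_hp = scipy.signal.butter(filter_order, crit_freq_high_pass_MHz * 1e6, btype='high', fs=fs_Hz)
_, h_lp = scipy.signal.freqz(b_lp, a_lp, worN=freqs, fs=fs_Hz)
_, h_hp = scipy.signal.freqz(b_hp, a_hp, worN=freqs, fs=fs_Hz)
filter_y = h_lp * h_hp        # combined band-pass response on the rfft grid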
Example #4
def getSpectData(datapath,
                 run,
                 event_limit,
                 bin_size=10,
                 trigger_type=1,
                 group_fft=False):
    '''
    This function obtains the data for a spectrogram.

    Parameters
    ----------
    datapath : str
        The path to the data where the runs are stored.  This is the same as the input to
        the reader class.
    run : int
        The run number to be loaded.
    event_limit : int
        This limits the number of events to load.  Loads from beginning of run to end, so
        reducing this speeds up the calculation by cutting off the later portions of the
        run.
    bin_size : int
        This is the number of seconds to include in each time slice of the spectrogram.  The
        average spectra will be computed per bin.  Default is 10.
    trigger_type : int
        This is the trigger type of events included in the spectrogram.  The default is 1.
    group_fft : bool
        This enables the fft calculation to be performed simultaneously for all events, rather
        than per waveform as they are loaded in.  This may be faster but requires more memory.
        Default is False.

    Returns
    -------
    reader : examples.beacon_data_reader.Reader
        This is the reader for the selected run.
    freqs : numpy.ndarray of floats
        This is the list of frequencies corresponding to the y-axis of the spectrogram data.
    spectra_dbish_binned : dict
        This is the data corresponding to the spectrogram.  Each entry in the dictionary contains
        the spectrogram data for a particular channel.  These are returned in dB-like units, i.e.
        they are calculated as if the waveforms were in volts, but in reality the waveforms are in
        adu, so there is some offset from these values to true dB units.
    '''
    reader = Reader(datapath, run)
    N = reader.N() if event_limit == None else min(reader.N(),
                                                   abs(event_limit))

    print('\nReader:')
    d = tools.interpret.getReaderDict(reader)
    pprint(d)
    print('\nHeader:')
    h = tools.interpret.getHeaderDict(reader)
    pprint(h)
    print('\nStatus:')
    s = tools.interpret.getStatusDict(reader)
    pprint(s)

    if reader.N() == 0:
        print('No events found in the selected run.')
    else:

        def rfftWrapper(channel, waveform_times, *args, **kwargs):
            spec = numpy.fft.rfft(*args, **kwargs)
            real_power_multiplier = 2.0 * numpy.ones_like(
                spec
            )  #The factor of 2 recovers the power in the negative-frequency bins dropped by rfft, except for the DC and Nyquist bins (handled below).
            if len(numpy.shape(spec)) != 1:
                real_power_multiplier[:, [0, -1]] = 1.0
            else:
                real_power_multiplier[[0, -1]] = 1.0
            spec_dbish = 10.0 * numpy.log10(
                real_power_multiplier * spec * numpy.conj(spec) /
                len(waveform_times)
            )  #Factor of 10 (not 20) because this is already a power quantity.  Dividing by N to match monutau.
            return channel, spec_dbish

        waveform_times = reader.t()
        freq_step = 1.0 / (len(waveform_times) *
                           (numpy.diff(waveform_times)[0] * 1e-9))
        freqs = numpy.arange(len(waveform_times) // 2 + 1) * freq_step
        freq_nyquist = 1 / (2.0 * numpy.diff(waveform_times)[0] * 1e-9)

        if group_fft == True:
            waveforms = {}
        spectra_dbish = {}
        readout_times = []

        for channel in range(8):
            if group_fft == True:
                waveforms['ch%i' % channel] = numpy.zeros(
                    (N, reader.header().buffer_length), dtype=int)
            spectra_dbish['ch%i' % channel] = numpy.zeros(
                (N, reader.header().buffer_length // 2 + 1), dtype=float)

        print('')

        for event_index, eventid in enumerate(
                range(N if event_limit == None else event_limit)):
            sys.stdout.write('\r(%i/%i)' % (eventid + 1, N))
            sys.stdout.flush()
            reader.setEntry(eventid)
            readout_times.append(getattr(reader.header(), 'readout_time'))
            for channel in range(8):
                if group_fft == True:
                    waveforms['ch%i' %
                              channel][event_index] = reader.wf(channel)
                else:
                    spectra_dbish['ch%i' % channel][event_index] = rfftWrapper(
                        'ch%i' % channel, waveform_times,
                        reader.wf(channel))[1]
        if group_fft == True:
            with concurrent.futures.ThreadPoolExecutor(
                    max_workers=cpu_count()) as executor:
                thread_results = []
                for channel in range(8):
                    thread_results.append(
                        executor.submit(rfftWrapper, 'ch%i' % channel,
                                        waveform_times,
                                        waveforms['ch%i' % channel]))

            print('Weaving threads')
            sys.stdout.flush()

            for index, future in enumerate(
                    concurrent.futures.as_completed(thread_results)):
                spectra_dbish[future.result()[0]] = future.result()[1]
                print('%i/8 Channel FFTs Completed' % (index + 1))

        bin_edges = numpy.arange(min(readout_times),
                                 max(readout_times) + bin_size, bin_size)
        bin_L_2d = numpy.tile(bin_edges[:-1], (len(readout_times), 1))
        bin_R_2d = numpy.tile(
            numpy.roll(bin_edges, -1)[:-1], (len(readout_times), 1))
        readout_times_2d = numpy.tile(readout_times, (len(bin_edges) - 1, 1)).T

        cut_2d = numpy.logical_and(readout_times_2d >= bin_L_2d,
                                   readout_times_2d < bin_R_2d).T

        del bin_L_2d
        del bin_R_2d
        del readout_times_2d

        spectra_dbish_binned = {}
        for channel in range(8):
            spectra_dbish_binned['ch%i' % channel] = numpy.zeros(
                (len(freqs), len(bin_edges) - 1))
            for index, cut in enumerate(cut_2d):
                spectra_dbish_binned['ch%i' % channel][:, index] = numpy.mean(
                    spectra_dbish['ch%i' % channel][cut], axis=0)
            spectra_dbish_binned['ch%i' % channel] = numpy.flipud(
                numpy.ma.array(spectra_dbish_binned['ch%i' % channel],
                               mask=numpy.isnan(
                                   spectra_dbish_binned['ch%i' % channel])))

        return reader, freqs, spectra_dbish_binned
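The factor-of-2 weighting inside rfftWrapper compensates for the one-sided spectrum returned by numpy.fft.rfft. A quick self-contained check (numpy only) that the weighted one-sided power matches the total power of the full FFT:

import numpy

waveform = numpy.random.randn(256)  # even length, so the last rfft bin is the Nyquist bin
spec = numpy.fft.rfft(waveform)

weights = 2.0 * numpy.ones(len(spec))
weights[[0, -1]] = 1.0              # DC and Nyquist bins are not doubled

one_sided_power = numpy.sum(weights * numpy.abs(spec)**2)
full_power = numpy.sum(numpy.abs(numpy.fft.fft(waveform))**2)
assert numpy.isclose(one_sided_power, full_power)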
Example #5
                print('Farm Mode = False')
                calculate_correlation_values = False #If True then the values will be newly calculated, if False then the script will try to load them from the existing files
            #Parameters:
            #Curve choice is a parameter in the bi-delta template model that changes the timing of the input delta signal.
            curve_choice = 0
            upsample_factor = 4
            save_data = True

            if farm_mode == False:
                plt.close('all')
            run = int(sys.argv[1])
            reader = Reader(datapath,run)

            #Prepare for Correlations
            reader.setEntry(0)
            waveform_times = reader.t()
            waveform_sample = reader.wf(0)
            waveform_sample, waveform_times = scipy.signal.resample(waveform_sample,len(waveform_sample)*upsample_factor,t=waveform_times) #upsample times to desired amount.

            cr_gen = crt.CosmicRayGenerator(waveform_times,t_offset=800.0,model='bi-delta')
            template_t, template_E = cr_gen.eFieldGenerator(plot=True,curve_choice=curve_choice)
            
            len_t = len(template_t)
            template_E = template_E/(numpy.std(template_E)*len_t) #Pre dividing to handle normalization of cross correlation.
            

            if calculate_correlation_values == True:
                try:
                    print(reader.status())
                except Exception as e:
                    print('Status Tree not present.  Returning Error.')
Example #6
            extra_text = 'site_2_bicone_vpol_17dB'
            numpy.savetxt('./run%i_pulser_eventids_%s.csv' % (run, extra_text),
                          numpy.sort(eventids[cut]),
                          delimiter=",")

        meas = {}
        for channel in range(8):
            meas[channel] = []

        for event_index, eventid in enumerate(eventids):
            if eventid < event_min:
                continue
            sys.stdout.write('(%i/%i)\r' % (event_index, len(eventids)))
            sys.stdout.flush()
            reader.setEntry(eventid)
            event_times = reader.t()
            for channel in range(8):
                channel = int(channel)
                meas[channel].append(
                    numpy.max(reader.wf(channel)) -
                    numpy.min(reader.wf(channel)))

        if run == 1507:
            lines_of_interest = numpy.array(
                [1035, 1275, 1755, 10034, 11714, 12314, 12914, 15711, 17354])
        elif run == 1509:
            lines_of_interest = numpy.array(
                [721, 1201, 1801, 2401, 3722, 4082, 4201])
        elif run == 1511:
            lines_of_interest = numpy.array([892, 2690, 3892, 5812])
Example #7
                        if not numpy.isin(template_filename_root,
                                          correlation_dsets):
                            file['template_correlations'].create_dataset(
                                template_filename_root, (file.attrs['N'], 8),
                                dtype='f',
                                compression='gzip',
                                compression_opts=4,
                                shuffle=True)
                        else:
                            print('%s group already exists in file %s' %
                                  (template_filename_root, filename))

                        template = numpy.loadtxt(template_filename,
                                                 delimiter=',')
                        template_std = numpy.std(template, axis=1) * len(
                            reader.t())  #Normalization factor

                        for eventid in eventids:
                            if eventid % 1000 == 0:
                                sys.stdout.write('\r%i/%i' %
                                                 (eventid, len(eventids) - 1))
                                sys.stdout.flush()
                            reader.setEntry(eventid)
                            for antenna in range(8):
                                signal = reader.wf(antenna)
                                std = numpy.std(signal)
                                c = scipy.signal.correlate(
                                    template[antenna],
                                    signal) / (std * template_std[antenna])
                                correlation_values[eventid,
                                                   antenna] = numpy.max(c)
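The division by std * template_std above scales the correlation so that a waveform matching the template peaks near 1. A self-contained sketch of that normalization with a synthetic decaying pulse instead of the stored templates:

import numpy
import scipy.signal

n = 512
t = numpy.arange(n)
template = numpy.sin(2 * numpy.pi * t / 64.0) * numpy.exp(-t / 200.0)  # synthetic pulse
signal = numpy.roll(template, 50)       # the same pulse, shifted in time

template_std = numpy.std(template) * n  # normalization factor, as in the snippet above
c = scipy.signal.correlate(template, signal) / (numpy.std(signal) * template_std)
print(numpy.max(c))                     # close to 1 for a shifted copy of the template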
Example #8
    def alignSelectedEvents(self, plot_aligned_wf=False,save_template=False,plot_timedelays=True):
        '''
        My plan is for this to be called when some events are circled in the plot.
        It will take those waveforms, align them, and plot the averaged waveforms.  No
        filters will be applied.
        '''
        if plot_timedelays == True:
            runs, counts = numpy.unique(self.id[self.ind][:,0],return_counts=True)
            run = runs[numpy.argmax(counts)]
            print('Only calculating template from run with most points circled: run %i with %i events circled'%(run,max(counts)))
            eventids = self.id[self.ind][:,1][self.id[self.ind][:,0] == run]
            coords = self.xys[self.ind]

            self.plotTimeDelays(self.xys[self.ind][:,0]*60,self.total_hpol_delays[self.ind],self.total_vpol_delays[self.ind])

        _reader = Reader(datapath,run)
        
        crit_freq_low_pass_MHz = None
        low_pass_filter_order = None
        
        crit_freq_high_pass_MHz = None# 45
        high_pass_filter_order = None# 12
        
        waveform_index_range = (None,None)
        
        final_corr_length = 2**18

        tct = TemplateCompareTool(_reader, final_corr_length=final_corr_length, crit_freq_low_pass_MHz=crit_freq_low_pass_MHz, crit_freq_high_pass_MHz=crit_freq_high_pass_MHz, low_pass_filter_order=low_pass_filter_order, high_pass_filter_order=high_pass_filter_order, waveform_index_range=waveform_index_range, plot_filters=False,apply_phase_response=True)
        tdc = TimeDelayCalculator(_reader, final_corr_length=final_corr_length, crit_freq_low_pass_MHz=crit_freq_low_pass_MHz, crit_freq_high_pass_MHz=crit_freq_high_pass_MHz, low_pass_filter_order=low_pass_filter_order, high_pass_filter_order=high_pass_filter_order, waveform_index_range=waveform_index_range, plot_filters=False,apply_phase_response=True)
        self.cor = Correlator(_reader,  upsample=2**15, n_phi=360, n_theta=360, waveform_index_range=(None,None),crit_freq_low_pass_MHz=crit_freq_low_pass_MHz, crit_freq_high_pass_MHz=crit_freq_high_pass_MHz, low_pass_filter_order=low_pass_filter_order, high_pass_filter_order=high_pass_filter_order, plot_filter=False,apply_phase_response=True)
        
        if True:
            print('TRYING TO MAKE CORRELATOR PLOT.')
            print(eventids)
            self.cor.animatedMap(eventids, 'both', '', plane_zenith=None,plane_az=None,hilbert=False, max_method=None,center_dir='E',save=False,dpi=300)

        times, averaged_waveforms = tct.averageAlignedSignalsPerChannel( eventids, align_method=0, template_eventid=eventids[0], plot=plot_aligned_wf,event_type=None)
        
        resampled_averaged_waveforms_original_length = numpy.zeros((8,len(_reader.t())))
        for channel in range(8):
            resampled_averaged_waveforms_original_length[channel] = scipy.interpolate.interp1d(times,averaged_waveforms[channel],kind='cubic',bounds_error=False,fill_value=0)(_reader.t())

        if False:
            for channel in range(8):
                plt.figure()
                plt.title(str(channel))
                for eventid in eventids:
                    tct.setEntry(eventid)
                    plt.plot(tct.t(),tct.wf(channel),label=str(eventid),alpha=0.8)
                plt.legend()
                plt.xlabel('t (ns)')
                plt.ylabel('adu')


        if save_template == True:
            filename_index = 0 
            filename = './generated_event_template_%i.csv'%filename_index
            existing_files = numpy.array(glob.glob('./*.csv'))

            while numpy.isin(filename,existing_files):
                filename_index += 1
                filename = './generated_event_template_%i.csv'%filename_index
            numpy.savetxt(filename,resampled_averaged_waveforms_original_length, delimiter=",")
            print('Generated template saved as:\n%s'%filename)



        tdc.calculateMultipleTimeDelays(eventids, align_method=8,hilbert=False,plot=True, colors=numpy.array(coords)[:,0])

        return resampled_averaged_waveforms_original_length
Example #9
                pprint(tools.interpret.getReaderDict(reader))
                print('\nHeader:')
                pprint(tools.interpret.getHeaderDict(reader))
                print('\nStatus:')
                pprint(tools.interpret.getStatusDict(reader))

            reader.header().Dump()
            reader.status().Dump()
            #print reader.N()

            # plot all waveforms
            plt.figure()
            for i in range(4):
                if i == 0:
                    ax = plt.subplot(4, 1, i + 1)
                    plt.plot(reader.t() - 5200, reader.wf(2 * i))
                else:
                    plt.subplot(4, 1, i + 1, sharex=ax, sharey=ax)
                    plt.plot(reader.t() - 5200, reader.wf(2 * i))
                if i in [0, 1, 2, 3]:
                    plt.ylabel('V (adu)')
                if i == 3:
                    plt.xlabel('t (ns)')
                plt.suptitle('Run %i, Event %i, Hpol' % (run, eid))
                plt.xlim(0, 600)
                plt.minorticks_on()
                plt.grid(b=True, which='major', color='k', linestyle='-')
                plt.grid(b=True,
                         which='minor',
                         color='tab:gray',
                         linestyle='--',
Example #10
            print(e)
            exc_type, exc_obj, exc_tb = sys.exc_info()
            fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
            print(exc_type, fname, exc_tb.tb_lineno)


if __name__ == '__main__':
    try:
        plt.close('all')
        #Get timing info from real BEACON data for testing.
        run = 1509
        known_pulser_ids = info.loadPulserEventids(remove_ignored=True)
        eventid = known_pulser_ids['run%i' % run]['hpol'][0]
        reader = Reader(datapath, run)
        reader.setEntry(eventid)
        test_t = reader.t()
        test_pulser_adu = reader.wf(0)

        #Creating test signal
        cr_gen = CosmicRayGenerator(test_t, t_offset=800.0, model='bi-delta')
        for curve_choice in range(4):
            out_t, out_E = cr_gen.eFieldGenerator(plot=True,
                                                  curve_choice=curve_choice)

        plt.figure()
        plt.subplot(2, 1, 1)
        plt.plot(test_t, test_pulser_adu, label='Pulser Signal')
        plt.ylabel('E (adu)')
        plt.xlabel('t (ns)')

        plt.legend()
Example #11
                    choice_events = numpy.sort(
                        numpy.random.choice(eventids,
                                            size=numpy.min(
                                                (1000, len(eventids))),
                                            replace=False))
                    times, averaged_waveforms = tct.averageAlignedSignalsPerChannel(
                        choice_events,
                        align_method=0,
                        template_eventid=None,
                        plot=plot_aligned_wf,
                        event_type=None)

                    resampled_averaged_waveforms = numpy.zeros(
                        (8, len(tct.waveform_times_corr)))
                    resampled_averaged_waveforms_original_length = numpy.zeros(
                        (8, len(reader.t())))
                    for channel in range(8):
                        #Resampling averaged waveforms to be more compatible with cross correlation framework.
                        resampled_averaged_waveforms[
                            channel] = scipy.interpolate.interp1d(
                                times,
                                averaged_waveforms[channel],
                                kind='cubic',
                                bounds_error=False,
                                fill_value=0)(tct.waveform_times_corr)
                        resampled_averaged_waveforms_original_length[
                            channel] = scipy.interpolate.interp1d(
                                times,
                                averaged_waveforms[channel],
                                kind='cubic',
                                bounds_error=False,
Example #12
    then plot 1 event from each trigger type.
    '''
    if True:
        plot_N_per_type = 2  #The number of events to plot per trigger type.  Meant to demonstrate what looping over events might look like.

        #Get run and events you want to look at.
        run = 1650
        #Create a Reader object for the specific run.
        reader = Reader(datapath, run)
        print('The run associated with this reader is:')
        print(reader.run)
        print('This run has %i events' % (reader.N()))
        eventids = numpy.arange(reader.N())
        trigger_type = loadTriggerTypes(reader)

        times = reader.t()  #The times of a waveform in ns.  Not upsampled.

        for trig_type in [1, 2, 3]:
            print('Plotting %i eventids of trig type %i' %
                  (plot_N_per_type, trig_type))
            trig_eventids = eventids[
                trigger_type == trig_type]  #All eventids of this trig type
            trig_eventids = numpy.sort(
                numpy.random.choice(trig_eventids, plot_N_per_type)
            )  #Randomly choosing a subset and sorting for faster loading of events

            for eventid in trig_eventids:
                reader.setEntry(
                    eventid
                )  #Actually makes the wf function address the correct event.
Example #13
    #General Prep
    channels = numpy.arange(8,dtype=int)
    
    #Main loop
    for run_index, run in enumerate(runs):
        if 'run%i'%run in list(known_pulser_ids.keys()):
            try:
                if 'run%i'%run in list(ignorable_pulser_ids.keys()):
                    eventids = numpy.sort(known_pulser_ids['run%i'%run][~numpy.isin(known_pulser_ids['run%i'%run],ignorable_pulser_ids['run%i'%run])])
                else:
                    eventids = numpy.sort(known_pulser_ids['run%i'%run])

                reader = Reader(datapath,run)
                reader.setEntry(eventids[0])
                
                waveform_times = reader.t()
                dt = waveform_times[1]-waveform_times[0]
                waveform_times_padded_to_power2 = numpy.arange(2**(numpy.ceil(numpy.log2(len(waveform_times)))))*dt #Rounding up to the next power of 2 of the waveform length.  USED FOR WAVEFORMS
                waveform_times_corr = numpy.arange(2*len(waveform_times_padded_to_power2))*dt #Doubling the length for the cross correlation later.  USED FOR CORRELATIONS
                
                if use_filter:
                    filter_y_corr,freqs_corr = makeFilter(waveform_times_corr,crit_freq_low_pass_MHz, crit_freq_high_pass_MHz, filter_order,plot_filter=True)
                    filter_y_wf,freqs_wf = makeFilter(waveform_times_padded_to_power2,crit_freq_low_pass_MHz, crit_freq_high_pass_MHz, filter_order,plot_filter=False)
                else:
                    freqs_corr = numpy.fft.rfftfreq(len(waveform_times_corr), d=(waveform_times_corr[1] - waveform_times_corr[0])/1.0e9)
                    freqs_wf = numpy.fft.rfftfreq(len(waveform_times_padded_to_power2), d=(waveform_times_padded_to_power2[1] - waveform_times_padded_to_power2[0])/1.0e9)

                df_corr = freqs_corr[1] - freqs_corr[0] #Note that this is the df for the padded correlation ffts and would not be the same as the one for the normal waveform ffts which have not been doubled in length. 
                final_dt_corr = 1e9/(2*(final_corr_length//2 + 1)*df_corr) #ns #This is the time step resulting from the cross correlation.  

                time_shifts_corr = numpy.arange(-(final_corr_length-1)//2,(final_corr_length-1)//2 + 1)*final_dt_corr #This results in the maximum of an autocorrelation being located at a time shift of 0.0
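The padded and doubled time axes above fix the frequency step and therefore the time resolution of the upsampled cross correlation. A worked version of that arithmetic with assumed values (500 samples at 2 ns, final_corr_length = 2**17, neither taken from the excerpt):

import numpy

dt = 2.0                                  # ns, assumed sample spacing
waveform_times = numpy.arange(500) * dt   # assumed 500-sample waveform

n_pow2 = int(2**numpy.ceil(numpy.log2(len(waveform_times))))   # 512
waveform_times_padded_to_power2 = numpy.arange(n_pow2) * dt
waveform_times_corr = numpy.arange(2 * n_pow2) * dt            # doubled for the correlation

freqs_corr = numpy.fft.rfftfreq(len(waveform_times_corr), d=dt / 1.0e9)
df_corr = freqs_corr[1] - freqs_corr[0]                        # ~0.49 MHz

final_corr_length = 2**17                 # assumed upsampled correlation length
final_dt_corr = 1e9 / (2 * (final_corr_length // 2 + 1) * df_corr)   # ~0.016 ns per step
time_shifts_corr = numpy.arange(-(final_corr_length - 1) // 2,
                                (final_corr_length - 1) // 2 + 1) * final_dt_corr
print(len(time_shifts_corr), time_shifts_corr[len(time_shifts_corr) // 2])  # final_corr_length samples, centered on 0.0 ns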
Example #14
    #Filter settings
    crit_freq_low_pass_MHz = 75
    crit_freq_high_pass_MHz = 15
    filter_order = 6
    plot_filter = True
    use_envelopes = False
    use_raw = True
    bins = 200
    expected_timing_pm_tol = 20 #ns
    corr_plot = True

    for run_index, run in enumerate(runs):
        eventids = numpy.sort(known_pulser_ids['run%i'%run])
        reader = Reader(datapath,run)

        waveform_times = reader.t()
        waveforms_upsampled = {}
        waveforms_raw = {}

        #Prepare filter
        reader.setEntry(eventids[0])
        wf = reader.wf(0)
        if use_raw:
            if resample_factor != 1:
                print('\n!!!\nUsing raw waveforms for alignment.  Setting the resample factor to 1.\n!!!\n') 
                resample_factor = 1

        wf , waveform_times = scipy.signal.resample(wf,len(wf)*resample_factor,t=reader.t())
        dt = waveform_times[1] - waveform_times[0]
        filter_y,freqs = makeFilter(waveform_times,crit_freq_low_pass_MHz, crit_freq_high_pass_MHz, filter_order, plot_filter=plot_filter)