def collection_procedure(filename):
    # File import -----------------------------------------------------------
    event_collection = CImporterEventsDualEnergy.import_data(filename, 0)
    print("#### Opening file ####")
    print(filename)
    # Energy discrimination -------------------------------------------------
    CEnergyDiscrimination.discriminate_by_energy(event_collection, low_threshold_kev=425,
                                                 high_threshold_kev=700)

    # Filtering of unwanted photon types ------------------------------------
    event_collection.remove_unwanted_photon_types(remove_thermal_noise=False, remove_after_pulsing=False,
                                                  remove_crosstalk=False, remove_masked_photons=True)

    event_collection.save_for_hardware_simulator()

    # Sharing of TDCs --------------------------------------------------------
    # event_collection.apply_tdc_sharing(pixels_per_tdc_x=1, pixels_per_tdc_y=1)

    # First photon discriminator ---------------------------------------------
    # DiscriminatorMultiWindow.DiscriminatorMultiWindow(event_collection)
    DiscriminatorDualWindow.DiscriminatorDualWindow(event_collection)

    # Making of coincidences -------------------------------------------------
    coincidence_collection = CCoincidenceCollection(event_collection)

    # Apply TDC - Must be applied after making the coincidences because the
    # coincidence adds a random time offset to pairs of events
    tdc = CTdc(system_clock_period_ps=5000, tdc_bin_width_ps=1, tdc_jitter_std=1)
    tdc.get_sampled_timestamps(coincidence_collection.detector1)
    tdc.get_sampled_timestamps(coincidence_collection.detector2)

    return event_collection, coincidence_collection
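
# Illustration only: the CTdc call above models a time-to-digital converter by
# quantizing each photon timestamp to a finite bin width and adding electronic
# jitter. A minimal numpy sketch of that idea (not the library's CTdc; the
# function name below is hypothetical), assuming timestamps in picoseconds:
import numpy as np

def sample_timestamps_sketch(timestamps_ps, tdc_bin_width_ps=1.0, tdc_jitter_std_ps=1.0):
    # Add Gaussian electronic jitter, then quantize to the TDC bin grid
    jittered = timestamps_ps + np.random.normal(0.0, tdc_jitter_std_ps, np.shape(timestamps_ps))
    return np.floor(jittered / tdc_bin_width_ps) * tdc_bin_width_ps
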
def main_loop():

    # Parse input
    parser = argparse.ArgumentParser(description='Process data out of the Spad Simulator')
    parser.add_argument("filename", help='The file path of the data to import')
    args = parser.parse_args()

    # File import --------------------------------------------------------------------------------------------------
    event_collection = CImporterEventsDualEnergy.import_data(args.filename, 0)

    # Energy discrimination ----------------------------------------------------------------------------------------
    CEnergyDiscrimination.discriminate_by_energy(event_collection, low_threshold_kev=425, high_threshold_kev=700)

    # Filtering of unwanted photon types ---------------------------------------------------------------------------
    event_collection.remove_unwanted_photon_types(remove_thermal_noise=False, remove_after_pulsing=False, remove_crosstalk=False, remove_masked_photons=False)

    event_collection.save_for_hardware_simulator()

    # Sharing of TDCs --------------------------------------------------------------------------------------------------
    event_collection.apply_tdc_sharing(pixels_per_tdc_x=5, pixels_per_tdc_y=5)

    # First photon discriminator -----------------------------------------------------------------------------------
    DiscriminatorDualWindow.DiscriminatorDualWindow(event_collection)
    #DiscriminatorWindowDensity.DiscriminatorWindowDensity(event_collection, qty_photons_to_keep=9)
    #DiscriminatorForwardDelta.DiscriminatorForwardDelta(event_collection, qty_photons_to_keep=3)

    # Making of coincidences ---------------------------------------------------------------------------------------
    coincidence_collection = CCoincidenceCollection(event_collection)

    # Apply TDC - Must be applied after making the coincidences because the coincidence adds a random time offset to pairs of events
    tdc = CTdc(system_clock_period_ps=4000, tdc_bin_width_ps=10, tdc_jitter_std=10)
    tdc.get_sampled_timestamps(coincidence_collection.detector1)
    tdc.get_sampled_timestamps(coincidence_collection.detector2)

    max_order = 8

    if max_order > event_collection.qty_of_photons:
        max_order = event_collection.qty_of_photons

    print "\n### Calculating time resolution for different algorithms ###"
    # Running timing algorithms ------------------------------------------------------------------------------------
    for i in range(1, max_order):
        algorithm = CAlgorithmSinglePhoton(photon_count=i)
        run_timing_algorithm(algorithm, coincidence_collection)

    for i in range(2, max_order):
        algorithm = CAlgorithmBlueExpectationMaximisation(coincidence_collection, photon_count=i)
        run_timing_algorithm(algorithm, coincidence_collection)
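
# run_timing_algorithm is not defined in these snippets. Whatever estimator the
# algorithm objects implement, the quantity reported elsewhere in these examples
# is a coincidence time resolution in FWHM. A self-contained sketch of that final
# step (hypothetical helper, assuming roughly Gaussian time differences):
import numpy as np

def coincidence_fwhm_ps_sketch(estimates_detector1_ps, estimates_detector2_ps):
    deltas = np.asarray(estimates_detector1_ps) - np.asarray(estimates_detector2_ps)
    # FWHM of a Gaussian is 2*sqrt(2*ln(2)) times its standard deviation
    return 2.0 * np.sqrt(2.0 * np.log(2.0)) * np.std(deltas)
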
def collection_procedure(filename, number_of_events=0, min_photons=np.nan):
    # File import -----------------------------------------------------------
    importer = ImporterRoot()
    importer.open_root_file(filename)
    event_collection = importer.import_all_spad_events(number_of_events)
    print("#### Opening file ####")
    print(filename)
    print(event_collection.qty_spad_triggered)
    # Energy discrimination -------------------------------------------------
    event_collection.remove_events_with_too_many_photons()
    CEnergyDiscrimination.discriminate_by_energy(
        event_collection, low_threshold_kev=0, high_threshold_kev=700)

    # Filtering of unwanted photon types ------------------------------------
    event_collection.remove_unwanted_photon_types(
        remove_thermal_noise=False,
        remove_after_pulsing=False,
        remove_crosstalk=False,
        remove_masked_photons=True)

    event_collection.save_for_hardware_simulator()

    # Sharing of TDCs --------------------------------------------------------
    # event_collection.apply_tdc_sharing(pixels_per_tdc_x=1, pixels_per_tdc_y=1)

    # First photon discriminator ---------------------------------------------
    # DiscriminatorMultiWindow.DiscriminatorMultiWindow(event_collection)
    DiscriminatorDualWindow.DiscriminatorDualWindow(event_collection,
                                                    min_photons)
    #event_collection.remove_events_with_fewer_photons(100)

    # Apply TDC - Must be applied after making the coincidences because the
    # coincidence adds a random time offset to pairs of events
    #tdc = CTdc(system_clock_period_ps=5000, tdc_bin_width_ps=1, tdc_jitter_std=1)
    #tdc.get_sampled_timestamps(event_collection)
    #tdc.get_sampled_timestamps(coincidence_collection.detector2)

    # Making of coincidences -------------------------------------------------
    coincidence_collection = CCoincidenceCollection(event_collection)

    return event_collection, coincidence_collection
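
# exp_func and neg_exp_func are passed to scipy.optimize.curve_fit below but are
# not shown in these snippets. Judging from the three-element initial guesses
# (e.g. p0 = [500, 1e-4, 0] and p0 = [10000, -0.005, 100]), they are presumably
# three-parameter exponentials. The hypothetical sketches below illustrate that
# shape; they are not the original definitions:
import numpy as np

def exp_func_sketch(x, a, b, c):
    # plain exponential with an offset
    return a * np.exp(b * x) + c

def neg_exp_func_sketch(x, a, b, c):
    # saturating exponential, e.g. SPAD triggers vs. incident photon count
    return a * (1.0 - np.exp(-b * x)) + c
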
def main_loop():
    collection_511_filename = "/home/cora2406/FirstPhotonEnergy/spad_events/LYSO1110_TW.root"
    collection_662_filename = "/home/cora2406/FirstPhotonEnergy/spad_events/LYSO1110_TW_662.root"
    collection_1275_filename = "/home/cora2406/FirstPhotonEnergy/spad_events/LYSO1110_TW_1275.root"

    event_count = 50000

    coll_511_events, coll_511_coincidences = collection_procedure(collection_511_filename, event_count)

    plt.figure()
    plt.scatter(coll_511_events.qty_of_incident_photons, coll_511_events.qty_spad_triggered)
    p0 = [500, 1e-4, 0]

    popt, pcov = curve_fit(neg_exp_func, coll_511_events.qty_of_incident_photons, coll_511_events.qty_spad_triggered, p0)

    fit_a = popt[0]
    fit_b = popt[1]
    fit_c = popt[2]

    #print(popt)

    fit_x = np.arange(0, 10000.0)
    fit_y = neg_exp_func(fit_x, fit_a, fit_b, fit_c)
    plt.plot(fit_x, fit_y, 'r')

    plt.figure()
    plt.scatter(coll_511_events.qty_spad_triggered, coll_511_events.qty_of_incident_photons)

    popt, pcov = curve_fit(exp_func, coll_511_events.qty_spad_triggered, coll_511_events.qty_of_incident_photons, p0)

    fit_a = popt[0]
    fit_b = popt[1]
    fit_c = popt[2]

    #print(popt)

    test_x = np.arange(0, 1000.0)
    test_y = exp_func(test_x, fit_a, fit_b, fit_c)
    plt.plot(test_x, test_y, 'r')

    #plt.figure()
    CEnergyDiscrimination.display_linear_energy_spectrum(coll_511_events, 128)
    #print(coll_511_events.kev_energy)

    mip = 50
    energy_thld_kev = 250
    energy_thld = np.zeros(coll_511_events.qty_of_events)

    Full_event_photopeak = np.logical_and(np.less_equal(coll_511_events.kev_energy, 700),
                                          np.greater_equal(coll_511_events.qty_spad_triggered, energy_thld_kev))

    energy_thld[0:coll_511_events.qty_of_events] = coll_511_events.timestamps[:, mip] - coll_511_events.timestamps[:, 0]
    p0 = [10000, -0.005, 100]
    popt, pcov = curve_fit(exp_func, coll_511_events.kev_energy, energy_thld, p0)

    print(popt)

    x = np.arange(0, 700)
    y = exp_func(x, popt[0], popt[1], popt[2])

    plt.figure()
    plt.scatter(coll_511_events.kev_energy, energy_thld)
    plt.plot(x, y, 'r')
    plt.show()

    timing_threshold = exp_func(energy_thld, popt[0], popt[1], popt[2])

    estimation_photopeak = np.logical_and(np.less_equal(energy_thld[0:event_count], timing_threshold),
                                          np.greater_equal(energy_thld[0:event_count], 0))

    True_positive, True_negative, False_positive, False_negative = \
        confusion_matrix(estimation_photopeak, Full_event_photopeak)

    true_positive_count = np.count_nonzero(True_positive)
    true_negative_count = np.count_nonzero(True_negative)
    false_positive_count = np.count_nonzero(False_positive)
    false_negative_count = np.count_nonzero(False_negative)
    success = (np.count_nonzero(True_positive) + np.count_nonzero(True_negative)) / float(coll_511_events.qty_of_events)

    print("#### The agreement results for photon #{0} are : ####".format(mip))
    print("True positive : {0}    True negative: {1}".format(true_positive_count, true_negative_count))
    print("False positive : {0}   False negative: {1}".format(false_positive_count, false_negative_count))

    print("For an agreement of {0:02.2%}\n".format(success))

    p0 = [500, -0.01, 50]
    popt, pcov = curve_fit(exp_func, energy_thld, coll_511_events.kev_energy, p0)

    print(popt)

    x = np.arange(50, 10000)
    y = exp_func(x, popt[0], popt[1], popt[2])

    plt.figure()
    plt.scatter(energy_thld, coll_511_events.kev_energy)
    plt.plot(x, y, 'r')
    plt.show()

    linear_energy = exp_func(energy_thld, popt[0], popt[1], popt[2])

    plt.figure()
    plt.hist(linear_energy, 128)

    photopeak_mean, photopeak_sigma, photopeak_amplitude = CEnergyDiscrimination.fit_photopeak(linear_energy, 128)
    peak_energy = 511
    k = peak_energy/photopeak_mean
    # event_collection.kev_energy = linear_energy*k
    kev_peak_sigma = k*photopeak_sigma
    kev_peak_amplitude = k*photopeak_amplitude

    fwhm_ratio = 2*np.sqrt(2*np.log(2))

    time_linear_energy_resolution = ((100*kev_peak_sigma*fwhm_ratio)/peak_energy)
    print("Linear energy resolution is {0:.2f} %".format(time_linear_energy_resolution))

    x = np.linspace(0, 700, 700)
    plt.plot(x, kev_peak_amplitude*mlab.normpdf(x, peak_energy/k, kev_peak_sigma), 'r')
    plt.show()
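
# confusion_matrix is not shown in these examples. From its use above, it returns
# element-wise boolean masks (np.count_nonzero is applied to each result
# afterwards) rather than a 2x2 matrix. A hypothetical equivalent:
import numpy as np

def confusion_matrix_sketch(estimation, reference):
    estimation = np.asarray(estimation, dtype=bool)
    reference = np.asarray(reference, dtype=bool)
    true_positive = np.logical_and(estimation, reference)
    true_negative = np.logical_and(~estimation, ~reference)
    false_positive = np.logical_and(estimation, ~reference)
    false_negative = np.logical_and(~estimation, reference)
    return true_positive, true_negative, false_positive, false_negative
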
def main_loop():
    matplotlib.rc('xtick', labelsize=8)
    matplotlib.rc('ytick', labelsize=8)
    matplotlib.rc('legend', fontsize=8)
    font = {'family': 'normal',
            'size': 8}

    matplotlib.rc('font', **font)
    nb_events = 50000
    nbins = 1000
    max_time = 1500
    energy_thld_kev = 250
    energy_thld = np.zeros(nb_events)

    pp = PdfPages("/home/cora2406/FirstPhotonEnergy/results/Threshold_Relative_LYSO1110_TW_300Hz_250keV.pdf")
    filename = "/home/cora2406/FirstPhotonEnergy/spad_events/LYSO1110_TW_300Hz.root"
    result_file = "/home/cora2406/FirstPhotonEnergy/results/LYSO1110_TW_300Hz_250keV.npz"

    event_collection, coincidence_collection = collection_procedure(filename, nb_events, 100)
    high_energy_collection = copy.deepcopy(event_collection)
    low_energy_collection = copy.deepcopy(event_collection)
    low, high = CEnergyDiscrimination.discriminate_by_energy(high_energy_collection, energy_thld_kev, 700)

    CEnergyDiscrimination.discriminate_by_energy(low_energy_collection, 0, energy_thld_kev)
    # CEnergyDiscrimination.display_energy_spectrum(low_energy_collection)

    # Timing algorithm check
    max_single_photon = 8
    max_BLUE = 10

    tr_sp_fwhm = np.zeros(max_single_photon)
    tr_BLUE_fwhm = np.zeros(max_BLUE)

    if max_single_photon > event_collection.qty_of_photons:
        max_single_photon = event_collection.qty_of_photons

    print "\n### Calculating time resolution for different algorithms ###"

    # plt.figure(1)
    # plt.hist(event_collection.trigger_type.flatten())

    # Running timing algorithms ------------------------------------------------
    for p in range(1, max_single_photon):
        algorithm = CAlgorithmSinglePhoton(photon_count=p)
        tr_sp_fwhm[p - 1] = run_timing_algorithm(algorithm, coincidence_collection)

    if max_BLUE > event_collection.qty_of_photons:
        max_BLUE = event_collection.qty_of_photons

    for p in range(2, max_BLUE):
        algorithm = CAlgorithmBlueExpectationMaximisation(coincidence_collection, photon_count=p)
        tr_BLUE_fwhm[p - 2] = run_timing_algorithm(algorithm, coincidence_collection)

    # Grab original energy deposit
    geant4_filename = "/media/My Passport/Geant4_Scint/LYSO_1x1x10_TW.root"
    importer = ImporterRoot()
    importer.open_root_file(geant4_filename)
    event_id, true_energy = importer.import_true_energy(nb_events)
    importer.close_file()

    j = 0
    delete_list = []
    ref_delete_list = []
    for i in range(0, np.size(event_id)):
        if j >= event_collection.qty_of_events:
            delete_list.append(i)
        elif event_collection.event_id[j] != event_id[i]:
            delete_list.append(i)
            if event_id[i] > event_collection.event_id[j]:
                ref_delete_list.append(j)
                j += 1
        else:
            j += 1

    event_id = np.delete(event_id, delete_list)
    true_energy = np.delete(true_energy, delete_list)
    bool_delete_list = np.ones(np.shape(event_collection.event_id), dtype=bool)
    bool_delete_list[ref_delete_list] = False
    event_collection.delete_events(bool_delete_list)

    if np.shape(event_id)[0] != event_collection.qty_of_events:
        print(np.shape(event_id), event_collection.qty_of_events)
        raise ValueError("The shapes aren't the same.")

    True_event_photopeak = np.logical_and(np.less_equal(true_energy, 0.7),
                                          np.greater_equal(true_energy, energy_thld_kev/1000.0))

    Full_event_photopeak = np.logical_and(np.less_equal(event_collection.qty_spad_triggered, high),
                                          np.greater_equal(event_collection.qty_spad_triggered, low))
    # Energy algorithms testing

    #mips = range(10, 100, 5)
    mips = [10, 30, 35, 40, 45, 50, 55, 60]
    #percentiles = [85, 90, 92.5, 95, 97.5, 98, 99, 99.9]
    percentiles = [99.999, 99.9999, 99.99999, 99.999999]
    event_count = event_collection.qty_of_events
    true_positive_count = np.zeros((np.size(mips), np.size(percentiles), 2))
    true_negative_count = np.zeros((np.size(mips), np.size(percentiles), 2))
    false_positive_count = np.zeros((np.size(mips), np.size(percentiles), 2))
    false_negative_count = np.zeros((np.size(mips), np.size(percentiles), 2))
    success = np.zeros((np.size(mips), np.size(percentiles), 2))

    for i, mip in enumerate(mips):

        event_collection.remove_events_with_fewer_photons(mip)
        try:
            energy_thld[0:event_count] = event_collection.timestamps[:, mip] - event_collection.timestamps[:, 0]
        except IndexError:
            print("Events with too few photons remain")
            continue

        if mip > 45:
            nbins = 2 * nbins
            max_time = 2000
        [hist, bin_edges] = np.histogram(energy_thld[0:event_count], nbins, range=(np.min(energy_thld), 10000))

        bins = bin_edges[0:-1] + ((bin_edges[1] - bin_edges[0]) / 2)

        for j, percentile in enumerate(percentiles):
            try:
                cutoff, cutoff_bin = find_energy_threshold(bins, hist, percentile, max_time)
            except RuntimeError:
                print("Could not resolve photopeak")
                continue

            print("Cutoff was set at {0} which is bin {1}".format(cutoff, cutoff_bin))

            estimation_photopeak = np.logical_and(np.less_equal(energy_thld[0:event_count], cutoff),
                                                  np.greater_equal(energy_thld[0:event_count], 0))

            True_positive, True_negative, False_positive, False_negative = \
                confusion_matrix(estimation_photopeak, Full_event_photopeak)

            true_positive_count[i, j, 0] = np.count_nonzero(True_positive)
            true_negative_count[i, j, 0] = np.count_nonzero(True_negative)
            false_positive_count[i, j, 0] = np.count_nonzero(False_positive)
            false_negative_count[i, j, 0] = np.count_nonzero(False_negative)
            success[i, j, 0] = (np.count_nonzero(True_positive) + np.count_nonzero(True_negative)) / float(event_collection.qty_of_events)

            print("#### The agreement results for photon #{0} are : ####".format(mip))
            print("True positive : {0}    True negative: {1}".format(true_positive_count[i, j, 0], true_negative_count[i, j, 0]))
            print("False positive : {0}   False negative: {1}".format(false_positive_count[i, j, 0], false_negative_count[i, j, 0]))

            print("For an agreement of {0:02.2%}\n".format(success[i, j, 0]))

            f, (ax1, ax2, ax3, ax4) = plt.subplots(4)
            f.subplots_adjust(hspace=0.5)
            index = np.logical_or(True_positive, True_negative)
            ETT = energy_thld[index]
            index = np.logical_or(False_positive, False_negative)
            ETTF = energy_thld[index]
            ax1.hist([ETT, ETTF], 256, stacked=True, color=['blue', 'red'])
            ax1.axvline(bins[cutoff_bin], color='green', linestyle='dashed', linewidth=2)
            ax1.set_xlabel('Arrival time of selected photon (ps)', fontsize=8)
            x_max_lim = round(np.max(energy_thld))/3
            # x_max_lim = 10000
            ax1.set_xlim([0, x_max_lim])
            ax1.set_ylabel('Counts', fontsize=8)
            ax1.set_title('Energy based on photon #{0} for {1}th percentile'.format(mip, percentile), fontsize=10)

            index = np.logical_or(True_positive, True_negative)
            ETT = event_collection.qty_spad_triggered[index]
            index = np.logical_or(False_positive, False_negative)
            ETTF = event_collection.qty_spad_triggered[index]
            ax2.hist([ETT, ETTF], 75, stacked=True, color=['blue', 'red'])
            ax2.axvline(low, color='green', linestyle='dashed', linewidth=2)
            ax2.set_xlabel('Total number of SPADs triggered', fontsize=8)
            ax2.set_ylabel('Counts', fontsize=8)
            x_legend_position = 300
            y_legend_position = ax2.get_ylim()[1]/2
            ax2.text(x_legend_position, y_legend_position, '{0:02.2%} agreement'.format(success[i, j, 0]))

            # plt.figure(1)
            # plt.hist(event_collection.trigger_type.flatten())
            True_positive, True_negative, False_positive, False_negative = \
                confusion_matrix(estimation_photopeak, True_event_photopeak)

            true_positive_count[i, j, 1] = np.count_nonzero(True_positive)
            true_negative_count[i, j, 1] = np.count_nonzero(True_negative)
            false_positive_count[i, j, 1] = np.count_nonzero(False_positive)
            false_negative_count[i, j, 1] = np.count_nonzero(False_negative)
            success[i, j, 1] = (np.count_nonzero(True_positive) + np.count_nonzero(True_negative)) / float(event_collection.qty_of_events)

            print("#### The agreement results for photon #{0} are : ####".format(mip))
            print("True positive : {0}    True negative: {1}".format(true_positive_count[i, j, 1], true_negative_count[i, j, 1]))
            print("False positive : {0}   False negative: {1}".format(false_positive_count[i, j, 1], false_negative_count[i, j, 1]))

            print("For an agreement of {0:02.2%}\n".format(success[i, j, 1]))



            index = np.logical_or(True_positive, True_negative)
            ETT = 1000*true_energy[index]
            index = np.logical_or(False_positive, False_negative)
            ETTF = 1000*true_energy[index]
            ax3.set_yscale("log")
            ax3.hist([ETT, ETTF], 75, stacked=True, color=['blue', 'red'])
            ax3.axvline(energy_thld_kev, color='green', linestyle='dashed', linewidth=2)
            ax3.set_xlabel('Total energy deposited (keV)', fontsize=8)
            ax3.set_ylabel('Counts', fontsize=8)
            x_legend_position = 100
            y_legend_position = ax3.get_ylim()[1]/10
            ax3.text(x_legend_position, y_legend_position, '{0:02.2%} agreement'.format(success[i, j, 1]))

            f.set_size_inches(4, 6)

            columns = ('Correct', 'Incorrect')
            rows = ('Int_Kept', 'Int_Rejected', 'Dep_Kept','Dep_Rejected')
            cell_text = ([true_positive_count[i, j, 0], false_positive_count[i, j, 0]],
                         [true_negative_count[i, j, 0], false_negative_count[i, j, 0]],
                         [true_positive_count[i, j, 1], false_positive_count[i, j, 1]],
                         [true_negative_count[i, j, 1], false_negative_count[i, j, 1]])

            ax4.axis('tight')
            ax4.axis('off')
            ax4.table(cellText=cell_text, rowLabels=rows, colWidths=[0.3, 0.3], colLabels=columns, loc='center', fontsize=8)
            plt.subplots_adjust(left=0.2, bottom=0.05)

            f.savefig(pp, format="pdf")

    plt.figure()
    plt.plot(mips, success[:, :, 0])
    plt.legend(percentiles, loc=4)
    plt.xlabel("Photon selected for energy estimation")
    plt.ylabel("Agreement with integration method")

    pp.savefig()

    plt.figure()
    plt.plot(mips, success[:, :, 1])
    plt.legend(percentiles, loc=4)
    plt.xlabel("Photon selected for energy estimation")
    plt.ylabel("Agreement with energy deposited")
    pp.savefig()

    np.savez(result_file, mips=mips, percentiles=percentiles, true_positive_count=true_positive_count,
             true_negative_count=true_negative_count, false_negative_count=false_negative_count,
             false_positive_count=false_positive_count, Single_Photon_Time_Resolution_FWHM=tr_sp_fwhm,
             BLUE_Time_Resolution=tr_BLUE_fwhm)

    pp.close()
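
# find_energy_threshold is not shown in these examples. It receives the arrival-
# time histogram (bins, hist), a percentile and a max_time, returns a cutoff value
# plus its bin index, and can apparently fail with RuntimeError when no photopeak
# is found. A hypothetical sketch along those lines: fit a Gaussian to the
# dominant peak below max_time (scipy's curve_fit raises RuntimeError when the
# fit does not converge) and place the cutoff at the requested percentile of
# that Gaussian. Illustration only, not the original implementation:
import numpy as np
from scipy.optimize import curve_fit
from scipy.stats import norm

def find_energy_threshold_sketch(bins, hist, percentile, max_time):
    in_range = bins <= max_time
    peak_guess = bins[in_range][np.argmax(hist[in_range])]

    def gauss(x, amplitude, mean, sigma):
        return amplitude * np.exp(-(x - mean) ** 2 / (2.0 * sigma ** 2))

    popt, _ = curve_fit(gauss, bins[in_range], hist[in_range],
                        p0=[np.max(hist[in_range]), peak_guess, 100.0])
    cutoff = norm.ppf(percentile / 100.0, loc=popt[1], scale=abs(popt[2]))
    cutoff_bin = int(np.searchsorted(bins, cutoff))
    return cutoff, cutoff_bin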