Example #1
    def __init__(self,
                 correction_filename=default_calibration_filename,
                 node='geometry'):
        """ class that holds the calibration functions.
        node: (str) 'scale' or 'geometry' for lifetime scale or geometry map
        call() to apply corrections.
        """


        # Load the scale factors; the second value returned by _scale is unused.
        E0, _ = self._scale(correction_filename, 'E' + node)
        Q0, _ = self._scale(correction_filename, 'Q' + node)
        LT, _ = self._scale(correction_filename, 'Elifetime')
        self.E0 = E0   # energy   scale (pes)
        self.LT = LT   # lifetime scale (us)
        self.Q0 = Q0   # charge   scale (pes)

        self.E0_correction = dstf.load_xy_corrections(correction_filename,
                                                      group="XYcorrections",
                                                      node="E" + node,
                                                      norm_strategy="const",
                                                      norm_opts={"value": E0})

        self.ELT_correction = dstf.load_lifetime_xy_corrections(
            correction_filename, group="XYcorrections", node="Elifetime")

        self.Q0_correction = dstf.load_xy_corrections(correction_filename,
                                                      group="XYcorrections",
                                                      node="Q" + node,
                                                      norm_strategy="const",
                                                      norm_opts={"value": Q0})

        self.QLT_correction = dstf.load_lifetime_xy_corrections(
            correction_filename, group="XYcorrections", node="Qlifetime")
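
# Usage sketch (not from the source): the class name `Calibration` is a
# hypothetical stand-in for the class above, `dstf` is assumed to be
# invisible_cities.reco.dst_functions, and the correction objects are
# assumed to follow IC's callable interface -- XY maps take (x, y),
# lifetime maps take (z, x, y), and both return an object whose .value
# holds the correction factor.
def apply_calibration(cal, x, y, z, e_raw, q_raw):
    e = e_raw * cal.E0_correction(x, y).value * cal.ELT_correction(z, x, y).value
    q = q_raw * cal.Q0_correction(x, y).value * cal.QLT_correction(z, x, y).value
    return e, q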
Example #2
                # Redistribute the NN hit's energy (h1.E) among the nearby
                # non-NN hits, in proportion to each hit's share of the
                # group's total energy.
                if zdist_min > 0:
                    hadd_etot = sum(ha.E for ha in h_add)
                    for ha in h_add:
                        ha.energy += h1.E * (ha.E / hadd_etot)


import sys

# Imports assumed from the full script (the excerpt is cut off above).
import invisible_cities.reco.dst_functions as dstf
from invisible_cities.io.dst_io import load_dst

hit_file   = sys.argv[1]
blob_file  = sys.argv[2]
evt_number = int(sys.argv[3])
mc         = int(sys.argv[4])   # nonzero selects the Monte Carlo maps below
radius     = int(sys.argv[5])

corrections    = "/Users/paola/Software/ic_data/corrections/corrections_run6352.h5"
time_evolution = "/Users/paola/Software/ic_data/corrections/Time_evolution_6352.h5"
LTcorrection = dstf.load_lifetime_xy_corrections(corrections,
                                                 group="XYcorrections",
                                                 node="Elifetime")
XYcorrection = dstf.load_xy_corrections(corrections,
                                        group="XYcorrections",
                                        node="Egeometry",
                                        norm_strategy="index",
                                        norm_opts={"index": (40, 40)})

# Average drift velocity over the run, from the time-evolution table.
dv_dst = load_dst(time_evolution, group="parameters", node="test")
drift_velocity = dv_dst.dv.values.mean()

if mc:
    # For Monte Carlo, reload the corrections from the MC-specific maps.
    correctionsLT = "/Users/paola/Software/ic_data/corrections/corrections_run6198.h5"
    correctionsXY = "/Users/paola/Software/ic_data/corrections/corrections_MC_4734.h5"
    LTcorrection = dstf.load_lifetime_xy_corrections(correctionsLT,
                                                     group="XYcorrections",
                                                     node="Elifetime")
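
# Sketch (not from the source) of how such maps are typically applied to a
# hit: the X/Y/Z/E attribute names and the .value interface follow the
# usual IC conventions and are assumptions here.
def correct_hit_energy(hit):
    return (hit.E
            * XYcorrection(hit.X, hit.Y).value
            * LTcorrection(hit.Z, hit.X, hit.Y).value)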
Example #3
import sys

import matplotlib.pyplot as plt

from glob import glob

from vetoedPDFs import sorter_func

from invisible_cities.io.pmaps_io import load_pmaps
import invisible_cities.core.fit_functions as fitf
from invisible_cities.icaro.hst_functions import shift_to_bin_centers
from invisible_cities.io.dst_io import load_dsts
from invisible_cities.reco.dst_functions import load_lifetime_xy_corrections
from invisible_cities.reco import pmaps_functions as pmf

lt_corr = load_lifetime_xy_corrections('corrs/corrections_run6198.h5',
                                       group='XYcorrections',
                                       node='Elifetime')


def compare_mc():
    """
    Looks at MC and data for the PMTs and checks scale levels
    at the level of individual PMTs and the sum.
    Attempt to check values for relative scaling for PMTs.
    run as python pmtCompMCData.py <MC data file base> <Data data file base>
    """

    mc_file_base = sys.argv[1]
    da_file_base = sys.argv[2]
    dst_mc_base = sys.argv[3]
    dst_da_base = sys.argv[4]
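
    # The function body is cut off in this excerpt.  A sketch of a plausible
    # continuation -- the '*.h5' patterns, the use of sorter_func directly
    # as a sort key, and the kdst group/node names "DST"/"Events" are all
    # assumptions, not taken from the source:
    mc_files = sorted(glob(mc_file_base + '*.h5'), key=sorter_func)
    da_files = sorted(glob(da_file_base + '*.h5'), key=sorter_func)

    dst_mc = load_dsts(sorted(glob(dst_mc_base + '*.h5')), 'DST', 'Events')
    dst_da = load_dsts(sorted(glob(dst_da_base + '*.h5')), 'DST', 'Events')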
Example #4
        $\sigma$ = {:.2f}
        R = {:.3}%
        Rbb = {:.3}%""".format(*values[1:], *reso(values)))

def gaussexpotext(values):
    return textwrap.dedent(r"""
        $\mu$ = {:.1f}
        $\sigma$ = {:.2f}
        $\bar x$ = {:.2f}
        R = {:.3}%
        Rbb = {:.3}%""".format(*values[1:3], values[4], *reso(values[0:3])))
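
# Sketch of the `reso` helper assumed above (NOT from the source): FWHM
# resolution from a Gaussian fit, extrapolated from the Kr peak (41.5 keV)
# to Qbb (2458 keV) with a 1/sqrt(E) scaling -- both the signature and the
# convention are assumptions.
def reso(values, e_peak=41.5, e_qbb=2458.0):
    _, mu, sigma = values[:3]              # values = (amplitude, mu, sigma)
    r = 2.3548 * sigma / mu * 100          # FWHM resolution in %
    return r, r * (e_peak / e_qbb) ** 0.5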

###############################################################################################
## CORRECTIONS TABLE
XYcorr  = dstf.load_xy_corrections(corr_file, group="XYcorrections",
                                   node="GeometryE_5.0mm",
                                   norm_strategy="const",
                                   norm_opts={"value": 41.5})
LTcorr  = dstf.load_lifetime_xy_corrections(corr_file, group="XYcorrections",
                                            node="Lifetime")
XYQcorr = dstf.load_xy_corrections(corr_file, group="XYcorrections",
                                   node="GeometryQ_5.0mm",
                                   norm_strategy="index",
                                   norm_opts={"index": (40, 40)})
LTQcorr = dstf.load_lifetime_xy_corrections(corr_file, group="XYcorrections",
                                            node="QLifetime")

# Merge the NN hits from the Penthesilea output into their non-NN neighbours.
def merge_NN_hits(hits_all, hits_nonNN):

    # Iterate through the non-NN dictionary and update the energies to
    # include the NN hits from the "all" dictionary.
    for (evt, hc) in hits_nonNN.items():

        # Get the corresponding collection of all hits.
        hc_all = hits_all[evt]

        # Add energy from all NN hits to hits in closest slice.
        for h1 in hc_all.hits:
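            # The loop body is cut off in this excerpt.  A sketch consistent
            # with the fragment shown in Example #2 -- the NN test (negative
            # charge) and the closest-slice selection are assumptions, not
            # taken from the source:
            if h1.Q < 0:                     # assumed marker for an NN hit
                zdist_min = min(abs(h1.Z - h2.Z) for h2 in hc.hits)
                h_add = [h2 for h2 in hc.hits
                         if abs(h1.Z - h2.Z) == zdist_min]
                if zdist_min > 0:
                    hadd_etot = sum(ha.E for ha in h_add)
                    for ha in h_add:
                        ha.energy += h1.E * (ha.E / hadd_etot)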