Example #1
    def __init__(self, polar_root_file, reference_time=0., rsp_file=None):
        """
        container class that converts raw POLAR root data into useful python
        variables


        :param polar_root_file: path to polar event file
        :param reference_time: reference time of the events (tunix?)
        :param rsp_file: path to rsp file
        """

        # open the event file
        with open_ROOT_file(polar_root_file) as f:
            tmp = tree_to_ndarray(f.Get('polar_out'))

            # extract the pedestal-corrected ADC channels,
            # which are non-integer and possibly
            # less than zero
            pha = tmp['Energy']

            # negative ADC channels are invalid
            idx = pha >= 0
            pha = pha[idx]

            # get the dead time fraction
            self._dead_time_fraction = tmp['dead_ratio'][idx]

            # get the arrival time, in tunix of the events
            self._time = tmp['tunix'][idx] - reference_time

            # the ADC channels are digitized into (preliminary) bins
            # further below, once the energy bounds are known

        # note: an rsp_file is required here, even though the signature defaults it to None
        with open_ROOT_file(rsp_file) as f:
            matrix = th2_to_arrays(f.Get('rsp'))[-1]
            ebounds = th2_to_arrays(f.Get('EM_bounds'))[-1]
            mc_low = th2_to_arrays(f.Get('ER_low'))[-1]
            mc_high = th2_to_arrays(f.Get('ER_high'))[-1]

        mc_energies = np.append(mc_low, mc_high[-1])

        # build the POLAR response

        self._rsp = InstrumentResponse(matrix=matrix,
                                       ebounds=ebounds,
                                       monte_carlo_energies=mc_energies)

        # bin the ADC channels

        self._binned_pha = np.digitize(pha, ebounds)
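
For readers unfamiliar with the final step, here is a minimal, self-contained sketch of the masking and np.digitize call above; the ADC values and bin edges are made up:

import numpy as np

# hypothetical pedestal-corrected ADC values; negative entries are invalid
pha = np.array([-3.2, 0.0, 12.7, 450.1, 88.9])

# keep only valid (non-negative) channels, mirroring the mask above
pha = pha[pha >= 0]

# hypothetical channel boundaries standing in for the 'EM_bounds'
# array read from the response file
ebounds = np.array([0.0, 10.0, 100.0, 1000.0])

# np.digitize returns, for each value, the index of the bin it falls in
binned_pha = np.digitize(pha, ebounds)
print(binned_pha)  # -> [1 2 3 2]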
Example #2
    def __init__(self, root_file, run_name):

        self._run_name = run_name

        # Read the data from the ROOT file

        with open_ROOT_file(root_file) as f:

            # First read the TTrees as NumPy structured arrays

            self._data_on = tree_to_ndarray(
                f.Get(run_name + '/data_on'))  # type: np.ndarray
            self._data_off = tree_to_ndarray(
                f.Get(run_name + '/data_off'))  # type: np.ndarray
            self._tRunSummary = np.squeeze(
                tree_to_ndarray(f.Get(run_name +
                                      '/tRunSummary')))  # type: np.ndarray

            # Now read the histogram
            self._log_recon_energies, \
            self._log_mc_energies, \
            self._hMigration = th2_to_arrays(f.Get(run_name + "/hMigration"))

            # Transform energies to keV (they are in TeV)
            self._log_recon_energies += 9
            self._log_mc_energies += 9

            # Compute bin centers and bin width of the Monte Carlo energy bins

            self._dE = (10**self._log_mc_energies[1:] -
                        10**self._log_mc_energies[:-1])
            self._mc_energies_c = (10**self._log_mc_energies[1:] +
                                   10**self._log_mc_energies[:-1]) / 2.0
            self._recon_energies_c = (10**self._log_recon_energies[1:] +
                                      10**self._log_recon_energies[:-1]) / 2.0

            self._n_chan = self._log_recon_energies.shape[0] - 1

            # Remove all NaNs and infs by substituting them with 0.0
            idx = np.isfinite(self._hMigration)
            self._hMigration[~idx] = 0.0

            # Read the TGraph
            tgraph = f.Get(run_name + "/gMeanEffectiveArea")
            self._log_eff_area_energies, self._eff_area = tgraph_to_arrays(
                tgraph)

            # Transform the effective area to cm2 (it is in m2 in the file)
            # NOTE: this value is for VEGAS, because the VEGAS effective area is in cm2
            self._eff_area *= 1e8

            # Transform energies to keV
            self._log_eff_area_energies += 9

        # Now use the effective area provided in the file to renormalize the migration matrix appropriately
        self._renorm_hMigration()

        # Exposure is tOn*(1-tDeadtimeFrac)
        self._exposure = float(1 -
                               self._tRunSummary['DeadTimeFracOn']) * float(
                                   self._tRunSummary['tOn'])

        # Members for generating OGIP equivalents

        self._mission = "VERITAS"
        self._instrument = "VERITAS"

        # Now bin the counts

        self._counts, _ = self._bin_counts_log(self._data_on['Erec'] * 1e9,
                                               self._log_recon_energies)

        # Now bin the background counts

        self._bkg_counts, _ = self._bin_counts_log(
            self._data_off['Erec'] * 1e9, self._log_recon_energies)

        print(
            "Read a %s x %s matrix, spectrum has %s bins, eff. area has %s elements"
            % (self._hMigration.shape[0], self._hMigration.shape[1],
               self._counts.shape[0], self._eff_area.shape[0]))

        # Read in the background renormalization (ratio between source and background region)

        self._bkg_renorm = float(self._tRunSummary['OffNorm'])

        self._start_energy = np.log10(175E6)  # 175 GeV in keV
        self._end_energy = np.log10(18E9)  # 18 TeV in keV
        self._first_chan = (
            np.abs(self._log_recon_energies -
                   self._start_energy)).argmin()  # value used by Giacomo: 61
        self._last_chan = (
            np.abs(self._log_recon_energies -
                   self._end_energy)).argmin()  # value used by Giacomo: 110
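
The bin-width/bin-center arithmetic and the subsequent counts binning can be illustrated with a small, self-contained sketch; the edges and event energies are made up, and np.histogram stands in for _bin_counts_log, whose body is not shown here:

import numpy as np

# hypothetical log10(E/keV) bin edges, standing in for
# self._log_mc_energies after the +9 shift (1 TeV = 1e9 keV)
log_edges = np.array([9.0, 9.5, 10.0, 10.5])

# bin widths and (linear) bin centers, as computed above
dE = 10 ** log_edges[1:] - 10 ** log_edges[:-1]
centers = (10 ** log_edges[1:] + 10 ** log_edges[:-1]) / 2.0

# plausible stand-in for _bin_counts_log: histogram event
# energies (keV) in log10 space using the same edges
energies = np.array([2e9, 5e9, 8e9, 2.5e10])
counts, _ = np.histogram(np.log10(energies), bins=log_edges)
print(counts)  # -> [1 2 1]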
Example #3
    def __init__(self, root_file, run_name):

        self._run_name = run_name

        # Read the data from the ROOT file

        with open_ROOT_file(root_file) as f:

            # First read the TTrees as NumPy structured arrays

            self._data_on = tree_to_ndarray(f.Get(run_name+'/data_on'))  # type: np.ndarray
            self._data_off = tree_to_ndarray(f.Get(run_name+'/data_off'))  # type: np.ndarray
            self._tRunSummary = np.squeeze(tree_to_ndarray(f.Get(run_name+'/tRunSummary')))  # type: np.ndarray

            # Now read the histogram
            self._log_recon_energies, \
            self._log_mc_energies, \
            self._hMigration = th2_to_arrays(f.Get(run_name + "/hMigration"))

            # Transform energies to keV (they are in TeV)
            self._log_recon_energies += 9
            self._log_mc_energies += 9

            # Compute bin centers and bin width of the Monte Carlo energy bins

            self._dE = (10 ** self._log_mc_energies[1:] - 10 ** self._log_mc_energies[:-1])
            self._mc_energies_c = (10 ** self._log_mc_energies[1:] + 10 ** self._log_mc_energies[:-1]) / 2.0
            self._recon_energies_c = (10 ** self._log_recon_energies[1:] + 10 ** self._log_recon_energies[:-1]) / 2.0

            self._n_chan = self._log_recon_energies.shape[0] - 1

            # Remove all NaNs and infs by substituting them with 0.0
            idx = np.isfinite(self._hMigration)
            self._hMigration[~idx] = 0.0

            # Read the TGraph
            tgraph = f.Get(run_name + "/gMeanEffectiveArea")
            self._log_eff_area_energies, self._eff_area = tgraph_to_arrays(tgraph)

            # Transform the effective area to cm2 (it is in m2 in the file)
            # NOTE: this value is for VEGAS, because the VEGAS effective area is in cm2
            self._eff_area *= 1e8

            # Transform energies to keV
            self._log_eff_area_energies += 9

        # Now use the effective area provided in the file to renormalize the migration matrix appropriately
        self._renorm_hMigration()

        # Exposure is tOn*(1-tDeadtimeFrac)
        self._exposure = float(1 - self._tRunSummary['DeadTimeFracOn']) * float(self._tRunSummary['tOn'])

        # Members for generating OGIP equivalents

        self._mission = "VERITAS"
        self._instrument = "VERITAS"

        # Now bin the counts

        self._counts, _ = self._bin_counts_log(self._data_on['Erec'] * 1e9, self._log_recon_energies)

        # Now bin the background counts

        self._bkg_counts, _ = self._bin_counts_log(self._data_off['Erec'] * 1e9, self._log_recon_energies)

        print("Read a %s x %s matrix, spectrum has %s bins, eff. area has %s elements" %
              (self._hMigration.shape[0], self._hMigration.shape[1], self._counts.shape[0], self._eff_area.shape[0]))

        # Read in the background renormalization (ratio between source and background region)

        self._bkg_renorm = float(self._tRunSummary['OffNorm'])

        self._start_energy = np.log10(175E6)  # 175 GeV in keV
        self._end_energy = np.log10(18E9)  # 18 TeV in keV
        self._first_chan = (np.abs(self._log_recon_energies - self._start_energy)).argmin()
        self._last_chan = (np.abs(self._log_recon_energies - self._end_energy)).argmin()
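
The argmin-based channel selection used just above picks the reconstructed-energy edge closest to each target energy; here is a self-contained sketch with made-up edges:

import numpy as np

# hypothetical log10(E/keV) channel edges
log_recon_energies = np.linspace(6.0, 10.5, 10)

start_energy = np.log10(175e6)  # 175 GeV in keV -> ~8.243
end_energy = np.log10(18e9)     # 18 TeV in keV -> ~10.255

# index of the edge closest to each target energy
first_chan = np.abs(log_recon_energies - start_energy).argmin()  # -> 4
last_chan = np.abs(log_recon_energies - end_energy).argmin()     # -> 9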
Example #4
    def from_root_file(cls, map_tree_file, roi):
        """
        Create a MapTree object from a ROOT file and a ROI. Do not use this directly, use map_tree_factory instead.

        :param map_tree_file:
        :param roi:
        :return:
        """

        map_tree_file = sanitize_filename(map_tree_file)

        # Check that it exists and can be read

        if not file_existing_and_readable(map_tree_file):

            raise IOError("MapTree %s does not exist or is not readable" %
                          map_tree_file)

        # Make sure we have a proper ROI (or None)

        assert isinstance(roi, HealpixROIBase) or roi is None, "You have to provide an ROI choosing from the " \
                                                               "available ROIs in the region_of_interest module"

        if roi is None:

            custom_warnings.warn(
                "You have set roi=None, so you are reading the entire sky")

        # Read map tree

        with open_ROOT_file(map_tree_file) as f:

            data_bins_labels = list(
                root_numpy.tree2array(f.Get("BinInfo"), "name"))

            # A transit is defined as 1 day, and totalDuration is in hours
            # Get the number of transit from bin 0 (as LiFF does)

            n_transits = root_numpy.tree2array(f.Get("BinInfo"),
                                               "totalDuration") / 24.0

            n_bins = len(data_bins_labels)

            # These are going to be Healpix maps, one for each data analysis bin

            data_analysis_bins = []

            for i in range(n_bins):

                name = data_bins_labels[i]

                bin_label = "nHit0%s/%s" % (name, "data")

                bkg_label = "nHit0%s/%s" % (name, "bkg")

                # Get ordering scheme
                nside = f.Get(bin_label).GetUserInfo().FindObject(
                    "Nside").GetVal()
                nside_bkg = f.Get(bkg_label).GetUserInfo().FindObject(
                    "Nside").GetVal()

                assert nside == nside_bkg

                scheme = f.Get(bin_label).GetUserInfo().FindObject(
                    "Scheme").GetVal()
                scheme_bkg = f.Get(bkg_label).GetUserInfo().FindObject(
                    "Scheme").GetVal()

                assert scheme == scheme_bkg

                assert scheme == 0, "NESTED scheme is not supported yet"

                if roi is not None:

                    # Only read the elements in the ROI

                    active_pixels = roi.active_pixels(nside,
                                                      system='equatorial',
                                                      ordering='RING')

                    counts = cls._read_partial_tree(nside, f.Get(bin_label),
                                                    active_pixels)
                    bkg = cls._read_partial_tree(nside, f.Get(bkg_label),
                                                 active_pixels)

                    this_data_analysis_bin = DataAnalysisBin(
                        name,
                        SparseHealpix(counts, active_pixels, nside),
                        SparseHealpix(bkg, active_pixels, nside),
                        active_pixels_ids=active_pixels,
                        n_transits=n_transits[i],
                        scheme='RING')

                else:

                    # Read the entire sky.

                    counts = tree_to_ndarray(f.Get(bin_label),
                                             "count").astype(np.float64)
                    bkg = tree_to_ndarray(f.Get(bkg_label),
                                          "count").astype(np.float64)

                    this_data_analysis_bin = DataAnalysisBin(
                        name,
                        DenseHealpix(counts),
                        DenseHealpix(bkg),
                        active_pixels_ids=None,
                        n_transits=n_transits[i],
                        scheme='RING')

                data_analysis_bins.append(this_data_analysis_bin)

        return cls(data_bins_labels, data_analysis_bins, roi)
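
For context on what roi.active_pixels() might return, here is a hedged sketch of selecting RING-ordered pixels inside a disc-shaped region with healpy; the HealpixROIBase implementation itself is not shown in this snippet, so treat the coordinates and radius as illustrative only:

import numpy as np
import healpy as hp

nside = 1024
ra, dec = 83.63, 22.01      # Crab nebula (equatorial, degrees)
radius = np.radians(2.0)    # 2-degree disc

# healpy uses co-latitude: theta = 90 deg - dec
vec = hp.ang2vec(np.radians(90.0 - dec), np.radians(ra))

# RING-ordered pixel indices inside the disc (nest=False is the default)
active_pixels = hp.query_disc(nside, vec, radius)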
Example #5
    def __init__(self, response_file_name):

        # Make sure file is readable

        response_file_name = sanitize_filename(response_file_name)

        # Check that it exists and can be read

        if not file_existing_and_readable(response_file_name):

            raise IOError("Response %s does not exist or is not readable" % response_file_name)

        self._response_file_name = response_file_name

        # Read response

        with open_ROOT_file(response_file_name) as f:

            # Get the name of the trees
            object_names = get_list_of_keys(f)

            # Make sure we have all the things we need

            assert 'LogLogSpectrum' in object_names
            assert 'DecBins' in object_names
            assert 'AnalysisBins' in object_names

            # Read spectrum used during the simulation
            self._log_log_spectrum = TF1Wrapper(f.Get("LogLogSpectrum"))

            # Get the analysis bins definition
            dec_bins = tree_to_ndarray(f.Get("DecBins"))

            dec_bins_lower_edge = dec_bins['lowerEdge']  # type: np.ndarray
            dec_bins_upper_edge = dec_bins['upperEdge']  # type: np.ndarray
            dec_bins_center = dec_bins['simdec']  # type: np.ndarray

            # materialize as a list so it can be indexed and len()-ed under Python 3
            self._dec_bins = list(zip(dec_bins_lower_edge, dec_bins_center, dec_bins_upper_edge))

            # Read in the ids of the response bins ("analysis bins" in LiFF jargon)
            response_bins_ids = tree_to_ndarray(f.Get("AnalysisBins"), "id")  # type: np.ndarray

            # Now we create a list of ResponseBin instances for each dec bin
            self._response_bins = collections.OrderedDict()

            for dec_id in range(len(self._dec_bins)):

                this_response_bins = []

                min_dec, dec_center, max_dec = self._dec_bins[dec_id]

                for response_bin_id in response_bins_ids:

                    this_response_bin = ResponseBin(f, dec_id, response_bin_id, self._log_log_spectrum,
                                                    min_dec, dec_center, max_dec)

                    this_response_bins.append(this_response_bin)

                self._response_bins[self._dec_bins[dec_id][1]] = this_response_bins

        # The file is closed at this point; explicitly delete f to make sure it is freed
        del f
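
A small, self-contained sketch of how the resulting OrderedDict (keyed by dec-bin center) might be used downstream; the centers and values here are made up:

import collections
import numpy as np

# hypothetical dec-bin centers -> response-bin lists, mimicking
# the self._response_bins mapping built above
response_bins = collections.OrderedDict(
    [(-15.0, "bins for -15"), (5.0, "bins for 5"), (25.0, "bins for 25")])

# pick the bins whose simulated declination is closest to the source
source_dec = 22.01
centers = np.array(list(response_bins.keys()))
closest = centers[np.abs(centers - source_dec).argmin()]  # -> 25.0
print(response_bins[closest])  # -> "bins for 25"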
Example #6
    def from_root_file(cls, response_file_name):

        # Make sure file is readable

        response_file_name = sanitize_filename(response_file_name)

        # Check that it exists and can be read

        if not file_existing_and_readable(
                response_file_name):  # pragma: no cover
            raise IOError("Response %s does not exist or is not readable" %
                          response_file_name)

        # Read response

        with open_ROOT_file(response_file_name) as f:

            # Get the name of the trees
            object_names = get_list_of_keys(f)

            # Make sure we have all the things we need

            assert 'LogLogSpectrum' in object_names
            assert 'DecBins' in object_names
            assert 'AnalysisBins' in object_names

            # Read spectrum used during the simulation
            log_log_spectrum = f.Get("LogLogSpectrum")

            # Get the analysis bins definition
            dec_bins_ = tree_to_ndarray(f.Get("DecBins"))

            dec_bins_lower_edge = dec_bins_['lowerEdge']  # type: np.ndarray
            dec_bins_upper_edge = dec_bins_['upperEdge']  # type: np.ndarray
            dec_bins_center = dec_bins_['simdec']  # type: np.ndarray

            # materialize as a list so it can be indexed and len()-ed under Python 3
            dec_bins = list(zip(dec_bins_lower_edge, dec_bins_center,
                                dec_bins_upper_edge))

            # Read in the ids of the response bins ("analysis bins" in LiFF jargon)
            response_bins_ids = tree_to_ndarray(f.Get("AnalysisBins"),
                                                "id")  # type: np.ndarray

            # Now we create a list of ResponseBin instances for each dec bin
            response_bins = {}

            for dec_id in range(len(dec_bins)):

                this_response_bins = []

                min_dec, dec_center, max_dec = dec_bins[dec_id]

                for response_bin_id in response_bins_ids:

                    this_response_bin = ResponseBin.from_ttree(
                        f, dec_id, response_bin_id, log_log_spectrum, min_dec,
                        dec_center, max_dec)

                    this_response_bins.append(this_response_bin)

                response_bins[dec_bins[dec_id][1]] = this_response_bins

        # Now the file is closed. Let's explicitly remove f so we are sure it is freed
        del f

        # Instance the class and return it
        instance = cls(response_file_name, dec_bins, response_bins)

        return instance
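
Note the design difference from the previous example: here all the ROOT parsing happens in a classmethod factory, and the constructor only stores already-parsed values. A minimal, self-contained sketch of the pattern (names and stand-in values are hypothetical):

class Response:
    def __init__(self, file_name, dec_bins, response_bins):
        # the constructor only stores already-parsed values
        self._file_name = file_name
        self._dec_bins = dec_bins
        self._response_bins = response_bins

    @classmethod
    def from_root_file(cls, file_name):
        # all I/O and parsing would happen here (stand-in values below),
        # then the plain constructor is invoked with the results
        dec_bins = [(-5.0, 0.0, 5.0)]
        response_bins = {0.0: []}
        return cls(file_name, dec_bins, response_bins)

instance = Response.from_root_file("my_response.root")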