Example #1
    def __init__(self, *argv):
        """
        Constructor.
        """
        # Set name and version
        self._name    = 'csobs2caldb'
        self._version = '1.1.0'

        # Initialise members
        self._observation = gammalib.GCTAObservation()
        self._mission     = 'cta'
        self._caldb       = 'cta'
        self._outfile     = gammalib.GFilename('irf_file.fits')
        self._base_dir    = ''
        self._cal_dir     = ''
        self._rsp_dir     = ''
        self._caldb_inx   = gammalib.GFits()
        self._irf_fits    = gammalib.GFits()

        # Initialise observation container from constructor arguments.
        self._obs, argv = self._set_input_obs(argv)
        
        # Initialise script by calling the appropriate class constructor.
        self._init_cscript(argv)

        # Return
        return
Example #2
    def __init__(self, *argv):
        """
        Constructor.
        """
        # Set name and version
        self._name = "cslightcrv"
        self._version = "1.1.0"

        # Initialise some members
        self._srcname = ""
        self._tbins = gammalib.GGti()
        self._stacked = False
        self._fits = gammalib.GFits()

        # Initialise observation container from constructor arguments.
        self._obs, argv = self._set_input_obs(argv)

        # Initialise script by calling the appropriate class constructor.
        self._init_cscript(argv)

        # Set logger properties
        self._log_header()
        self._log.date(True)

        # Return
        return
Example #3
    def _check_result_file(self, filename):
        """
        Check result file
        """
        # Open result file
        fits = gammalib.GFits(filename)

        # Get HDUs
        ts = fits['Primary']
        prefactor = fits['Prefactor']
        index = fits['Index']

        # Check dimensions
        self.test_value(ts.naxis(), 2, 'Check for 2 dimensions')
        self.test_value(ts.naxes(0), 5, 'Check for 5 pixels in X')
        self.test_value(ts.naxes(1), 5, 'Check for 5 pixels in Y')
        self.test_value(prefactor.naxis(), 2, 'Check for 2 dimensions')
        self.test_value(prefactor.naxes(0), 5, 'Check for 5 pixels in X')
        self.test_value(prefactor.naxes(1), 5, 'Check for 5 pixels in Y')
        self.test_value(index.naxis(), 2, 'Check for 2 dimensions')
        self.test_value(index.naxes(0), 5, 'Check for 5 pixels in X')
        self.test_value(index.naxes(1), 5, 'Check for 5 pixels in Y')

        # Return
        return
Example #4
    def _check_arf(self, filename, bins):
        """
        Check ARF file
        """
        # Expected column names
        cols = ['ENERG_LO', 'ENERG_HI', 'SPECRESP']

        # Open FITS file
        fits = gammalib.GFits(filename)

        # Check FITS file structure
        self.test_value(fits.size(), 2, 'Check for 2 extensions in ARF file')
        self.test_assert(fits.contains('SPECRESP'),
                         'Test if ARF file contains "SPECRESP" extension')

        # Get SPECRESP table
        table = fits['SPECRESP']

        # Check FITS table structure
        self.test_value(table.ncols(), len(cols),
                        'Check for %d columns in ARF table' % len(cols))
        self.test_value(table.nrows(), bins,
                        'Check for %d rows in ARF table' % bins)
        for col in cols:
            self.test_assert(table.contains(col),
                             'Test if ARF file contains "' + col + '" column')

        # Close FITS file
        fits.close()

        # Return
        return
Example #5
    def _check_result_files(self, fitsfile, xmlfile, nmodels):
        """
        Check ctfindvar result

        Parameters
        ----------
        fitsfile : str
            FITS file name
        xmlfile : str
            Model definition XML file name
        nmodels : int
            Expected number of models
        """
        # Read variability FITS file
        if fitsfile != 'NONE':
            fits = gammalib.GFits(fitsfile)
            self.test_value(fits.size(), 3, 'Check for 3 extensions in output file')

        # Read model definition XML file
        models = gammalib.GModels(xmlfile)
        self.test_value(models.size(), nmodels,
                        'Check for %d models in model definition XML file' % nmodels)

        # Return
        return
Example #6
    def _check_n_obs(self, pathname, n_expected):
        """
        Check number of available observations

        Parameters
        ----------
        pathname : str
            Path to copied IACT data store
        n_expected : int
            Expected number of observations in IACT data store
        """
        # Set file name
        obs_index_name = gammalib.GFilename(pathname+'/obs-index.fits[OBS_INDEX]')
        
        # Open index file
        fits = gammalib.GFits(obs_index_name)
        
        # Get number of observations
        n_obs = fits[obs_index_name.extname()].nrows()
        
        # Close FITS file
        fits.close()

        # Check for existence of observations
        self.test_value(n_obs, n_expected, 'Check for number of observations')
                
        # Return
        return
Example #7
    def save(self):
        """ 
        Save TS map and remove slices if requested.
        """
        # Write header
        if self._logTerse():
            self._log("\n")
            self._log.header1("Save TS map")

        # Get output filename in case it was not read ahead
        outmap = self["outmap"].filename()

        # Log filename
        if self._logTerse():
            self._log(gammalib.parformat("TS map file"))
            self._log(outmap.url())
            self._log("\n")

        # Create FITS file
        fits = gammalib.GFits()

        # Write TS map into primary
        self._tsmap.write(fits)

        # Loop over maps and write them to fits
        for i in range(len(self._maps)):
            self._maps[i].write(fits)

        # Set map names as extensions
        for i in range(len(self._mapnames)):
            fits[i + 1].extname(self._mapnames[i])

        # Check if map is fully done
        done = True
        for pix in self._statusmap:
            if pix < 0.5:
                done = False
                break

        # Write status map if we are not done yet
        if not done:
            self._statusmap.write(fits)
            fits[fits.size() - 1].extname("STATUS MAP")

        # Save FITS file
        fits.saveto(outmap, self._clobber())

        # Delete TS input maps if requested
        if self._delete:
            for filename in self._merged_files:
                os.remove(filename)
                if self._logTerse():
                    self._log(gammalib.parformat("Deleted input file"))
                    self._log(filename)
                    self._log("\n")

        # Return
        return
Example #8
    def run(self):
        """
        Run the script.
        """
        # Switch screen logging on in debug mode
        if self._logDebug():
            self._log.cout(True)

        # Get parameters
        self._get_parameters()

        # Get the calibration database
        caldb = gammalib.GCaldb()

        # Extract mission names from the calibration database
        missions = self._get_missions(caldb)

        # Loop over missions
        for mission in missions:

            # Skip all non-CTA instruments
            if mission != 'cta':
                continue

            # Write mission into logger
            self._log_header1(gammalib.TERSE, 'Mission: ' + mission)

            # Extract instruments
            instruments = self._get_instruments(caldb, mission)

            # Loop over instruments
            for instrument in instruments:

                # Write instrument into logger
                self._log_header3(
                    gammalib.TERSE,
                    'Response functions in database "' + instrument + '"')

                # Open calibration index file and retrieve calibrations
                filename = '/data/' + mission + '/' + instrument + '/caldb.indx'
                cifname = caldb.rootdir() + filename
                fits = gammalib.GFits(cifname)
                cif = fits['CIF']
                caltable = cif['CAL_CBD']

                # Extract response names
                names = self._get_response_names(caltable)

                # Print response names
                if self._logTerse():
                    for name in names:
                        self._log(name + '\n')
                    self._log('\n')

        # Return
        return
Example #9
import gammalib
import numpy as np


def events_gammalib2rec(obs_list):
    """Convert the event list of the first observation into a NumPy record array"""
    events = obs_list[0].events()            # GCTAEventList
    fits = gammalib.GFits()
    events.write(fits)                       # write events into in-memory FITS
    events_bintable = fits.table('EVENTS')   # GFitsTable
    events_num = events_bintable.nrows()
    tuples = [(events_bintable['RA'][i], events_bintable['DEC'][i],
               events_bintable['ENERGY'][i]) for i in range(events_num)]
    return np.rec.array(tuples,
                        formats='float,float,float',
                        names='RA,DEC,ENERGY')
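
A minimal usage sketch for the converter above; the observation definition file name is an assumption, and the first observation is expected to carry an event list:

# Load an observation container and convert its first event list into a
# NumPy record array ('obs.xml' is a hypothetical file name)
obs_list = gammalib.GObservations('obs.xml')
events = events_gammalib2rec(obs_list)
print(events['ENERGY'][:10])  # energies of the first ten events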
Example #10
    def _create_fits(self):
        """
        Create FITS file object from fit results
        """
        # Initialise list of result dictionaries
        results = []

        # Get source parameters
        pars = self._get_free_par_names()

        # Loop over time bins
        for i in range(len(self._phbins)):

            # Get time boundaries
            phmin = self._phbins[i][0]
            phmax = self._phbins[i][1]

            # Initialise result dictionary
            result = {
                'phmin': phmin,
                'phmax': phmax,
                'pars': pars,
                'values': {}
            }

            # Store fit results
            phname = str(phmin) + '-' + str(phmax)

            # If the model contains the source of interest fill results
            try:
                source = self._fitmodels[phname][self._srcname]
                for par in pars:
                    result['values'][par] = source[par].value()
                    result['values']['e_' + par] = source[par].error()

            # ... otherwise fill the results with zeros
            except:
                for par in pars:
                    result['values'][par] = 0.
                    result['values']['e_' + par] = 0.

            # Append result to list of dictionaries
            results.append(result)

        # Create FITS table from results
        table = self._create_fits_table(results)

        # Create FITS file and append FITS table to FITS file
        self._fits = gammalib.GFits()
        self._fits.append(table)

        # Return
        return
Example #11
    def __init__(self, *argv):
        """
        Constructor
        """
        # Initialise application by calling the appropriate class constructor
        self._init_csobservation(self.__class__.__name__, ctools.__version__,
                                 argv)

        # Initialise members
        self._observation = gammalib.GCTAObservation()
        self._mission = 'cta'
        self._caldb = 'cta'
        self._outfile = gammalib.GFilename('irf_file.fits')
        self._base_dir = ''
        self._cal_dir = ''
        self._rsp_dir = ''
        self._caldb_inx = gammalib.GFits()
        self._irf_fits = gammalib.GFits()

        # Return
        return
Example #12
    def __init__(self, *argv):
        """
        Constructor
        """
        # Initialise application by calling the appropriate class constructor
        self._init_csobservation(self.__class__.__name__, ctools.__version__,
                                 argv)

        # Initialise class members
        self._use_maps = False
        self._stack = False
        self._mask = False
        self._fits = gammalib.GFits()

        # Return
        return
Example #13
    def __init__(self, *argv):
        """
        Constructor
        """
        # Initialise application by calling the appropriate class constructor
        self._init_csobservation(self.__class__.__name__, ctools.__version__,
                                 argv)

        # Initialise some members
        self._srcname = ''
        self._tbins = gammalib.GGti()
        self._onoff = False
        self._stacked = False
        self._fits = gammalib.GFits()

        # Return
        return
Example #14
    def _check_result_file(self, filename, bins):
        """
        Check result file
        """
        # Open result file
        fits = gammalib.GFits(filename)

        # Get spectrum table
        spectrum = fits['SPECTRUM']

        # Check dimensions
        self.test_value(spectrum.nrows(), bins,
                        'Check for %d rows in spectrum' % bins)
        self.test_value(spectrum.ncols(), 8, 'Check for 8 columns in spectrum')

        # Return
        return
Example #15
    def _check_result_file(self, filename, test_complete=True):
        """
        Check result file
        """
        # Open result file
        fits = gammalib.GFits(filename)

        # Get HDUs
        ts = fits['Primary']
        prefactor = fits['Prefactor']
        index = fits['Index']
        status = fits['STATUS MAP']

        # Check dimensions
        self.test_value(ts.naxis(), 2, 'Check for 2 dimensions')
        self.test_value(ts.naxes(0), 2, 'Check for 2 pixels in X')
        self.test_value(ts.naxes(1), 1, 'Check for 1 pixel in Y')
        self.test_value(prefactor.naxis(), 2, 'Check for 2 dimensions')
        self.test_value(prefactor.naxes(0), 2, 'Check for 2 pixels in X')
        self.test_value(prefactor.naxes(1), 1, 'Check for 1 pixel in Y')
        self.test_value(index.naxis(), 2, 'Check for 2 dimensions')
        self.test_value(index.naxes(0), 2, 'Check for 2 pixels in X')
        self.test_value(index.naxes(1), 1, 'Check for 1 pixel in Y')
        self.test_value(status.naxis(), 2, 'Check for 2 dimensions')
        self.test_value(status.naxes(0), 2, 'Check for 2 pixels in X')
        self.test_value(status.naxes(1), 1, 'Check for 1 pixel in Y')

        # Initialise flag indicating whether the map is complete
        done = True

        # Loop over status sky map
        for pix in status:

            # Flag the map as incomplete if the pixel is below threshold
            if pix < -0.5:
                done = False
                break

        # Test for completeness of merged map
        self.test_assert(done == test_complete,
                         'Test if map was merged completely')

        # Return
        return
Example #16
    def _check_column(self, filename, colname):
        """
        Check that column colname exists.

        Parameters
        ----------
        filename : str
            Event list file name
        colname : str
            Name of the column to check
        """
        # Check that column exists
        gfits = gammalib.GFits(filename)
        gtable = gfits[1]
        check = int(gtable.contains(colname))
        self.test_value(check, 1, 'Check column '+colname)

        # Return
        return
Example #17
    def csspec_run(self,
                   input_obs_list,
                   input_models=None,
                   enumbins=20,
                   output_file='spectrum.fits',
                   log_file='csspec.log',
                   force=False,
                   save=False):
        spec = cscripts.csspec()
        if isinstance(input_obs_list, gammalib.GObservations):
            spec.obs(input_obs_list)
        elif (input_models is not None and os.path.isfile(input_obs_list)
              and os.path.isfile(input_models)):
            # Observation list and model definition given as files
            spec['inobs'] = input_obs_list
            spec['inmodel'] = input_models
        else:
            raise Exception('Cannot understand input obs list for csspec')
        spec['srcname'] = self.name
        spec['caldb'] = self.caldb
        spec['irf'] = self.irf
        spec['method'] = 'AUTO'
        spec['emin'] = 0.03
        spec['emax'] = 150.0
        spec['ebinalg'] = 'LOG'
        spec['enumbins'] = enumbins
        spec['calc_ts'] = True
        spec['calc_ulim'] = True
        spec['outfile'] = output_file
        spec['logfile'] = log_file
        spec['nthreads'] = self.nthreads
        if force or not os.path.isfile(output_file):
            spec.logFileOpen()
            spec.run()
        else:
            # Output file already exists and no rerun was forced: reload
            # the spectrum (accesses the private _fits member of csspec)
            spec._fits = gammalib.GFits(output_file)
        if (save and force) or (save and not os.path.isfile(output_file)):
            spec.save()
            logger.info('File {} created.'.format(output_file))
        return spec
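
A hypothetical call site for csspec_run; the analysis object and file names are assumptions, and the instance must provide the name, caldb, irf and nthreads attributes used above:

# Run csspec on an observation definition file with a model definition,
# forcing a rerun and saving the resulting spectrum FITS file
# ('analysis', 'obs.xml' and 'models.xml' are hypothetical)
spec = analysis.csspec_run('obs.xml',
                           input_models='models.xml',
                           enumbins=10,
                           force=True,
                           save=True)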
Example #18
    def _check_normalization(self, filename, colnames):
        """
        Check that the probability values in columns identified by colnames are
        correctly normalized.

        Parameters
        ----------
        filename : str
            Event list file name
        colnames : list
            Names of the columns to check
        """
        # Check that the probability values are correctly normalized

        # Open fits table
        gfits  = gammalib.GFits(filename)
        gtable = gfits[1]
        ncols  = gtable.ncols()
        nevt   = gtable.nrows()
        srcs   = []

        # Loop over colnames
        for colname in colnames:

            # Get the correct column corresponding to colname from the table
            for icol in range(ncols):
                gcol = gtable[icol]
                if gcol.name() == colname:
                    break

            # Read and save column content 
            src1 = []
            for ievt in range(nevt):
                src1.append(gcol[ievt])
            srcs.extend(src1)

        # Compute sum
        tot = sum(srcs)
        self.test_value(int(tot+0.5), nevt, 'Check that probability columns '
                                            'are normalized')

        # Return
        return
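
As a side note, the manual index scan above can be avoided: GFitsTable also supports column access by name, as used elsewhere in these examples. A sketch of the equivalent lookup:

# Direct name-based column access (equivalent to the index scan)
gcol = gtable[colname]
src1 = [gcol[ievt] for ievt in range(nevt)]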
Example #19
def gamma_saveto(file_size):
    import gammalib

    # Set file name
    filename = 'data.fits'

    # Create the FITS file (the second argument requests creation if the
    # file does not yet exist)
    fits = gammalib.GFits(filename, True)

    # Each row holds a 1 kB string, so file_size * 1024 rows give a file
    # of roughly file_size MB
    nrows = file_size * 1024
    col = gammalib.GFitsTableStringCol('STRING', nrows, 1024)
    for i in range(nrows):
        col[i] = str(i * 100)
    tbl_ascii = gammalib.GFitsAsciiTable()
    tbl_ascii.append_column(col)
    fits.append(tbl_ascii)

    # Save the result into a copy, overwriting any existing file
    fits.saveto('copy.fits', True)
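
Hypothetical usage of the writer above; with 1 kB rows, the argument roughly fixes the output size in MB:

# Write data.fits and a copy.fits of roughly 2 MB each
gamma_saveto(2)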
Example #20
    def _check_phase_curve(self, filename, bins):
        """
        Check phase curve file
        """
        # Expected column names
        cols = [
            'PHASE_MIN', 'PHASE_MAX', 'Prefactor', 'e_Prefactor', 'Index',
            'e_Index'
        ]

        # Open FITS file
        fits = gammalib.GFits(filename)

        # Check FITS file structure
        self.test_value(fits.size(), 2,
                        'Check for 2 extensions in phase curve FITS file')
        self.test_assert(fits.contains('PHASECURVE'),
                         'FITS file contains "PHASECURVE" extension')

        # Get PHASECURVE table
        table = fits['PHASECURVE']

        # Check FITS table structure
        self.test_value(
            table.ncols(), len(cols),
            'Check for %d columns in phase curve FITS table' % len(cols))
        self.test_value(table.nrows(), bins,
                        'Check for %d rows in phase curve FITS table' % bins)
        for col in cols:
            self.test_assert(table.contains(col),
                             'FITS file contains "' + col + '" column')

        # Check that the table has been filled: the Prefactor should have
        # the right order of magnitude
        for s in range(table.nrows()):
            self.test_value(table['Prefactor'][s], 4.e-16, 3.e-16,
                            'Check prefactor value')

        # Close FITS file
        fits.close()

        # Return
        return
Example #21
    def _check_light_curve(self, filename, bins, prefactor=5.7e-16):
        """
        Check light curve file
        """
        # Expected column names
        cols = [
            'MJD', 'e_MJD', 'Prefactor', 'e_Prefactor', 'Index', 'e_Index',
            'TS', 'DiffUpperLimit', 'FluxUpperLimit', 'EFluxUpperLimit'
        ]

        # Open FITS file
        fits = gammalib.GFits(filename)

        # Check FITS file structure
        self.test_value(fits.size(), 2,
                        'Check for 2 extensions in light curve FITS file')
        self.test_assert(fits.contains('LIGHTCURVE'),
                         'FITS file contains "LIGHTCURVE" extension')

        # Get LIGHTCURVE table
        table = fits['LIGHTCURVE']

        # Check FITS table structure
        self.test_value(
            table.ncols(), len(cols),
            'Check for %d columns in light curve FITS table' % len(cols))
        self.test_value(table.nrows(), bins,
                        'Check for %d rows in light curve FITS table' % bins)
        for col in cols:
            self.test_assert(table.contains(col),
                             'FITS file contains "' + col + '" column')

        # Check that the Prefactor has the right order of magnitude
        for s in range(table.nrows()):
            self.test_value(table['Prefactor'][s], prefactor, 0.9 * prefactor,
                            'Check prefactor value')

        # Close FITS file
        fits.close()

        # Return
        return
Example #22
    def _check_result_file(self, filename):
        """
        Check result file
        """
        # Open result file
        fits = gammalib.GFits(filename)

        # Get HDUs (access fails if an extension is missing)
        cube = fits['Primary']
        ebounds = fits['EBOUNDS']
        gti = fits['GTI']

        # Check dimensions
        self.test_value(cube.naxis(), 3, 'Check for 3 cube dimensions')
        self.test_value(cube.naxes(0), 200, 'Check for 200 pixels in X')
        self.test_value(cube.naxes(1), 200, 'Check for 200 pixels in Y')
        self.test_value(cube.naxes(2), 20, 'Check for 20 pixels in Z')

        # Return
        return
Example #23
    def _check_pha(self, filename, bins, ncols=8):
        """
        Check PHA file
        """
        # Expected column names
        cols = [
            'CHANNEL', 'COUNTS', 'STAT_ERR', 'SYS_ERR', 'QUALITY', 'GROUPING',
            'AREASCAL', 'BACKSCAL'
        ]

        # Open FITS file
        fits = gammalib.GFits(filename)

        # Check FITS file structure
        self.test_value(fits.size(), 3, 'Check for 3 extensions in PHA file')
        self.test_assert(fits.contains('SPECTRUM'),
                         'Test if PHA file contains "SPECTRUM" extension')
        self.test_assert(fits.contains('EBOUNDS'),
                         'Test if PHA file contains "EBOUNDS" extension')

        # Get SPECTRUM table
        table = fits['SPECTRUM']

        # Check FITS table structure
        self.test_value(table.ncols(), ncols,
                        'Check for %d columns in PHA table' % ncols)
        self.test_value(table.nrows(), bins,
                        'Check for %d rows in PHA table' % bins)
        for col in cols:
            self.test_assert(table.contains(col),
                             'Test if PHA file contains "' + col + '" column')

        # Check EBOUNDS table
        table = fits['EBOUNDS']
        self._check_ebounds(table, bins)

        # Close FITS file
        fits.close()

        # Return
        return
Example #24
    def add(self, rspname, split=False, clobber=True):
        """
        Add new calibration. The actual version will put
        all calibrations in the same file, although each part of the response
        function will have its own logical name. We can thus easily modify
        the script to put each calibration information in a separate file.

        Parameters:
         rspname - Response name
        Keywords:
         split   - Split IRF over several files?
         clobber - Overwrite existing files?
        """
        # Set calibrate file names
        if split:
            self.ea_file = "ea_" + rspname + ".dat"
            self.psf_file = "psf_" + rspname + ".dat"
            self.edisp_file = "edisp_" + rspname + ".dat"
            self.bgd_file = "bgd_" + rspname + ".dat"
        else:
            self.ea_file = "irf_" + rspname + ".dat"
            self.psf_file = "irf_" + rspname + ".dat"
            self.edisp_file = "irf_" + rspname + ".dat"
            self.bgd_file = "irf_" + rspname + ".dat"

        # Open calibration database index
        if self.cif is None:
            self.cif = gammalib.GFits(self.base_path + "/caldb.indx", True)

        # If the file has no CIF extension then create it now
        try:
            self.hdu_cif = self.cif.table("CIF")
        except:
            self.create_cif()
            self.hdu_cif = self.cif.table("CIF")

        # Set response name
        self.cal_name = "NAME(" + rspname + ")"

        # Return
        return
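
A hypothetical call site for add(), assuming entry is an instance of the (unnamed) calibration class with base_path pointing at an existing CALDB tree:

# All IRF components in a single file ('entry' and the response names
# are hypothetical)
entry.add('South_50h')
# One file per IRF component (effective area, PSF, energy dispersion,
# background)
entry.add('North_5h', split=True)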
Example #25
    def __init__(self, *argv):
        """
        Constructor
        """
        # Initialise application by calling the appropriate class constructor
        self._init_csobservation(self.__class__.__name__, ctools.__version__,
                                 argv)

        # Initialise some members. Phases are stored in a nested list
        # [[ph1min,ph1max], [ph2min,ph2max],..]
        self._srcname = ''
        self._phbins = [[0.0, 1.0]]
        self._onoff = False
        self._stacked = False
        self._fits = gammalib.GFits()
        self._fitmodels = {}
        self._nthreads = 0
        self._excl_reg_map = None  # Exclusion region map for on/off analysis

        # Return
        return
Example #26
    def _read_pha_counts(self, filename):
        """
        Read and integrate the counts in a pha file.
        Pha file structure already tested in _check_pha()
        """
        # Open FITS file
        fits = gammalib.GFits(filename)

        # Get SPECTRUM table
        table = fits['SPECTRUM']

        # Integrate counts
        counts_col = table['COUNTS']
        counts = 0
        for channel in range(counts_col.nrows()):
            counts += counts_col[channel]

        # Close FITS file
        fits.close()

        # Return
        return counts
Example #27
    def _check_rmf(self, filename, bins, etruebins=17):
        """
        Check RMF file
        """
        # Expected column names
        cols = ['ENERG_LO', 'ENERG_HI', 'N_GRP', 'F_CHAN', 'N_CHAN', 'MATRIX']

        # Open FITS file
        fits = gammalib.GFits(filename)

        # Check FITS file structure
        self.test_value(fits.size(), 3, 'Check for 3 extensions in RMF file')
        self.test_assert(fits.contains('MATRIX'),
                         'Test if RMF file contains "MATRIX" extension')
        self.test_assert(fits.contains('EBOUNDS'),
                         'Test if RMF file contains "EBOUNDS" extension')

        # Get MATRIX table
        table = fits['MATRIX']

        # Check FITS table structure
        self.test_value(table.ncols(), len(cols),
                        'Check for %d columns in RMF table' % len(cols))
        self.test_value(table.nrows(), etruebins,
                        'Check for %d rows in RMF table' % etruebins)
        for col in cols:
            self.test_assert(table.contains(col),
                             'Test if RMF file contains "' + col + '" column')

        # Check EBOUNDS table
        table = fits['EBOUNDS']
        self._check_ebounds(table, bins)

        # Close FITS file
        fits.close()

        # Return
        return
Example #28
    def _load_skymap(self):
        """
        Load sky map

        Returns
        -------
        skymap : `~gammalib.GSkyMap`
            Sky map
        """
        # Get skymap filename
        inmap = self['inmap'].filename()

        # Open sky map file
        fits = gammalib.GFits(inmap)

        # Extract primary extension as sky map
        skymap = gammalib.GSkyMap(fits.image(0))

        # Close sky map file
        fits.close()

        # Return the first map of the sky map
        return skymap.extract(0)
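
As an aside, GSkyMap can also load a FITS file directly, which shortens the pattern above when the map sits in the primary image; a sketch with a hypothetical file name:

import gammalib

# Load the sky map directly from file and keep only the first map
skymap = gammalib.GSkyMap('skymap.fits')
single = skymap.extract(0)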
Example #29
    def _init_ts_map(self, fitsfile):
        """
        Initialise Test Statistic map.
        """
        # Set filename
        self._in_filename = fitsfile

        # Open FITS file
        fits = gammalib.GFits(fitsfile)

        # Read TS and status maps
        self._tsmap = gammalib.GSkyMap()
        self._tsmap.read(fits[0])
        self._statusmap = gammalib.GSkyMap()
        self._statusmap.read(fits["STATUS MAP"])

        # Get other maps
        self._maps = []
        self._mapnames = []

        # Loop over extensions
        for hdu in fits:

            # Leave out primary and status extension
            if hdu.extname() != "IMAGE" and hdu.extname() != "STATUS MAP":

                # Add present maps
                skymap = gammalib.GSkyMap()
                skymap.read(hdu)
                self._maps.append(skymap)
                self._mapnames.append(hdu.extname())

        # Close FITS file
        fits.close()

        # Return
        return
Example #30
    def _check_light_curve(self, filename, bins):
        """
        Check light curve file
        """
        # Expected column names
        cols = [
            'MJD', 'e_MJD', 'Prefactor', 'e_Prefactor', 'Index', 'e_Index',
            'TS', 'UpperLimit'
        ]

        # Open FITS file
        fits = gammalib.GFits(filename)

        # Check FITS file structure
        self.test_value(fits.size(), 2,
                        'Check for 2 extensions in light curve FITS file')
        self.test_assert(fits.contains('LIGHTCURVE'),
                         'FITS file contains "LIGHTCURVE" extension')

        # Get LIGHTCURVE table
        table = fits['LIGHTCURVE']

        # Check FITS table structure
        self.test_value(
            table.ncols(), len(cols),
            'Check for %d columns in light curve FITS table' % len(cols))
        self.test_value(table.nrows(), bins,
                        'Check for %d rows in light curve FITS table' % bins)
        for col in cols:
            self.test_assert(table.contains(col),
                             'FITS file contains "' + col + '" column')

        # Close FITS file
        fits.close()

        # Return
        return
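
The recurring GFits access pattern in these examples, condensed into a single sketch; the file and extension names are assumptions:

import gammalib

fits = gammalib.GFits('result.fits')  # open an existing FITS file
for hdu in fits:                      # iterate over all HDUs
    print(hdu.extname())
table = fits['LIGHTCURVE']            # binary table access by name
print(table.nrows(), table.ncols())   # table dimensions
col = table['TS']                     # column access by name
print(col[0])                         # value in the first row
fits.close()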