Example #1
    def test_display_sky_map(self):

        pu = PlottingUtils(self.config, self.agilepyLogger)

        smooth = 4
        fileFormat = ".png"
        title = "testcase"
        cmap = "CMRmap"
        regFiles = [
            Utils._expandEnvVar("$AGILE/catalogs/2AGL_2.reg"),
            Utils._expandEnvVar("$AGILE/catalogs/2AGL_2.reg")
        ]
        regFileColors = ["yellow", "blue"]


        file = pu.displaySkyMap(
                    self.datadir + "/testcase_EMIN00100_EMAX00300_01.cts.gz",
                    smooth=smooth,
                    fileFormat=fileFormat,
                    title=title,
                    cmap=cmap,
                    regFiles=regFiles,
                    regFileColors=regFileColors,
                    catalogRegions="2AGL",
                    catalogRegionsColor="red",
                    saveImage=True,
                    normType="linear")

        assert os.path.isfile(file)
Example #2
    def test_get_first_and_last_line_in_file(self):

        line1 = '/ASDC_PROC2/FM3.119_2/EVT/agql2004151750_2004151850.EVT__FM.gz 514057762.000000 514061362.000000 EVT\n'
        line2 = '/ASDC_PROC2/FM3.119_2/EVT/agql2004152249_2004160008.EVT__FM.gz 514075704.000000 514080437.000000 EVT\n'
        line3 = '/ASDC_PROC2/FM3.119_2/EVT/agql2004160008_2004160045.EVT__FM.gz 514080437.000000 514082644.000000 EVT\n'

        # Test 1: one line
        test_file = self.tmpDir.joinpath("test_file1.txt")
        with open(test_file, "w") as f:
            f.write(line1)
        (first, last) = Utils._getFirstAndLastLineInFile(test_file)
        assert first == line1
        assert last == line1

        # Test 2: two lines
        test_file = self.tmpDir.joinpath("test_file2.txt")
        with open(test_file, "w") as f:
            f.write(line1)
            f.write(line2)
        (first, last) = Utils._getFirstAndLastLineInFile(test_file)
        assert first == line1
        assert last == line2

        # Test 3: three lines
        test_file = self.tmpDir.joinpath("test_file3.txt")
        with open(test_file, "w") as f:
            f.write(line1)
            f.write(line2)
            f.write(line3)
        (first, last) = Utils._getFirstAndLastLineInFile(test_file)
        assert first == line1
        assert last == line3
    def visibilityPlot(self,
                       time_windows,
                       ra,
                       dec,
                       fermi_datapath,
                       agile_datapath,
                       run,
                       zmax,
                       mode="all",
                       step=1):
        """It runs offaxis tools and creates a directory containing the result files
        
        Args:
            time_windws (2d float Array): It contains the tstart-tstop intervals to process the data, the structure has developed as a 2d array(eg [[t1,t2],[t3, t4], ..., [tn-1, tn]])
            ra (float): ra value
            dec (float): dec value
            fermi_datapath (str): fermi log filepath
            agile_datapath (str): agile log filepath
            run (integer): run number
            zmax (float): maximum offaxis degrees
            mode (str): options "agile" | "fermi" | "all": Select all to plot both data, otherwise it will plot only agile/fermi data 
            step (float): step value for plotting
        
        Returns:
            dir (str): A new directory containing the results
        """
        self.outdir = self.config.getOptionValue("outdir")

        agile_datapath = Utils._expandEnvVar(agile_datapath)
        fermi_datapath = Utils._expandEnvVar(fermi_datapath)

        offaxis = Create_offaxis_plot(time_windows,
                                      ra,
                                      dec,
                                      fermi_datapath,
                                      agile_datapath,
                                      run,
                                      zmax,
                                      mode,
                                      step,
                                      outdir=self.outdir,
                                      logger=self.logger)

        dir = offaxis.run()

        self.logger.info(self, "Output directory: %s", dir)

        return dir
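
A minimal call sketch for the method above. The object name offaxis_engine, the data paths and the numeric values are illustrative assumptions; only the parameter list comes from the signature and docstring.

# Hypothetical usage: "offaxis_engine" stands for an already-initialized object
# exposing the visibilityPlot() method above; paths and values are placeholders.
time_windows = [[456361778, 456373279]]   # one tstart-tstop interval

result_dir = offaxis_engine.visibilityPlot(
    time_windows,
    ra=263.85,                                     # illustrative source coordinates (degrees)
    dec=-32.93,
    fermi_datapath="$HOME/data/fermi/FERMI.log",   # placeholder log filepath
    agile_datapath="$HOME/data/agile/AGILE.log",   # placeholder log filepath
    run=1,
    zmax=60,
    mode="all",                                    # plot both AGILE and Fermi data
    step=1)

print(result_dir)   # the directory containing the offaxis result files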
Example #4
    def _convertEnergyBinsStrings(confDict):

        energyBins = []
        for stringList in confDict["maps"]["energybins"]:
            res = Utils._parseListNotation(stringList)
            energyBins.append([int(r) for r in res])
        confDict["maps"]["energybins"] = energyBins
    def getConfiguration(confFilePath, userName, outputDir, verboselvl):
        """Utility method to create a configuration file.

        Args:
            confFilePath (str): the path and filename of the configuration file that is going to be created.
            userName (str): the username of who is running the software.
            outputDir (str): the path to the output directory. The output directory will be created using the following format: 'userName_sourceName_todaydate'
            verboselvl (int): the verbosity level of the console output. Levels: 0 => critical, warning; 1 => critical, warning, info; 2 => critical, warning, info, debug.

        Returns:
            None
        """

        configuration = """
output:
  outdir: %s
  filenameprefix: eng_product
  logfilenameprefix: eng_log
  username: %s
  sourcename: agilefermioffaxis
  verboselvl: %d

        """ % (outputDir, userName, verboselvl)

        with open(Utils._expandEnvVar(confFilePath), "w") as cf:

            cf.write(configuration)
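
A hedged usage sketch for the configuration writer above. The path, username and verbosity value are placeholders, and the method is assumed to be callable as shown since it takes no self argument.

# Illustrative call: writes a minimal YAML configuration to ./offaxis_conf.yaml.
getConfiguration(
    confFilePath="./offaxis_conf.yaml",       # file created (or overwritten) by the call
    userName="analysis_user",                 # placeholder username
    outputDir="$HOME/agilepy_offaxis_output",
    verboselvl=1)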
Example #6
    def _convertBackgroundCoeff(confDict, bkgCoeffName):

        bkgCoeffVal = confDict["model"][bkgCoeffName]
        numberOfEnergyBins = len(confDict["maps"]["energybins"])
        fovbinnumber = confDict["maps"]["fovbinnumber"]
        numberOfMaps = numberOfEnergyBins * fovbinnumber

        if bkgCoeffVal is None:
            confDict["model"][bkgCoeffName] = [-1 for i in range(numberOfMaps)]

        # if -1
        elif bkgCoeffVal == -1:
            confDict["model"][bkgCoeffName] = [-1 for i in range(numberOfMaps)]

        # if only one value
        elif isinstance(bkgCoeffVal, numbers.Number):
            confDict["model"][bkgCoeffName] = [bkgCoeffVal]

        # if comma separated values
        elif isinstance(bkgCoeffVal, str):
            confDict["model"][bkgCoeffName] = Utils._parseListNotation(
                bkgCoeffVal)

        # if List
        elif isinstance(bkgCoeffVal, List):
            confDict["model"][bkgCoeffName] = bkgCoeffVal

        else:
            print(
                f"Something's wrong. bkgCoeffName: {bkgCoeffName}, bkgCoeffVal: {bkgCoeffVal}"
            )
            confDict["model"][bkgCoeffName] = None
Example #7
    def __init__(self, configurationFilePath):

        self.config = AgilepyConfig()

        # load only "input" and "output" sections
        self.config.loadBaseConfigurations(
            Utils._expandEnvVar(configurationFilePath))

        # Creating output directory

        self.outdir = self.config.getConf("output", "outdir")

        Path(self.outdir).mkdir(parents=True, exist_ok=True)

        self.logger = AgilepyLogger()

        self.logger.initialize(
            self.outdir, self.config.getConf("output", "logfilenameprefix"),
            self.config.getConf("output", "verboselvl"))

        self.plottingUtils = PlottingUtils(self.config, self.logger)

        if "AGILE" not in os.environ:
            raise AGILENotFoundError("$AGILE is not set.")

        if "PFILES" not in os.environ:
            raise PFILESNotFoundError("$PFILES is not set.")
    def visibilityPlot(self,
                       logfilesIndex,
                       tmin,
                       tmax,
                       src_x,
                       src_y,
                       ref,
                       zmax=60,
                       step=1,
                       writeFiles=True,
                       computeHistogram=True,
                       saveImage=True,
                       fileFormat="png",
                       title="Visibility Plot"):
        """ It computes the angular separations between the center of the
        AGILE GRID field of view and the coordinates for a given position in the sky,
        given by src_ra and src_dec.

        Args:
            logfilesIndex (str): the index file for the logs files.
            tmin (float): inferior observation time limit to analize.
            tmax (float): superior observation time limit to analize.
            src_x (float): source position x (unit: degrees)
            src_y (float): source position y (unit: degrees)
            zmax (float): maximum zenith distance of the source to the center of the detector (unit: degrees)
            step (integer): time interval in seconds between 2 consecutive points in the resulting plot. Minimum accepted value: 0.1 s.
            writeFiles (bool): if True, two text files with the separions data will be written on file.
            saveImage (bool): If True, the image will be saved on disk
            fileFormat (str): The output format of the image
            title (str): The plot title

        Returns:
            separations (List): the angular separations
            ti_tt (List):
            tf_tt (List):
            ti_mjd (List):
            tf_mjd (List):
            skyCordsFK5.ra.deg
            skyCordsFK5.dec.deg
        """
        logfilesIndex = Utils._expandEnvVar(logfilesIndex)

        separations, ti_tt, tf_tt, ti_mjd, tf_mjd, src_ra, src_dec, sepFile = self._computePointingDistancesFromSource(
            logfilesIndex, tmin, tmax, src_x, src_y, ref, zmax, step,
            writeFiles)

        vis_plot = self.plottingUtils.visibilityPlot(separations, ti_tt, tf_tt,
                                                     ti_mjd, tf_mjd, src_ra,
                                                     src_dec, zmax, step,
                                                     saveImage, self.outdir,
                                                     fileFormat, title)
        hist_plot = None

        if computeHistogram:
            hist_plot = self.plottingUtils.visibilityHisto(
                separations, ti_tt, tf_tt, src_ra, src_dec, zmax, step,
                saveImage, self.outdir, fileFormat, title)

        return vis_plot, hist_plot
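
A hedged usage sketch of the method above. The ageng object is assumed to be an already-initialized instance of the class shown in Example #7; the time interval, coordinates and index path mirror the test case that appears before Example #15.

# Hypothetical usage, with values taken from the test case shown later in this section.
vis_plot, hist_plot = ageng.visibilityPlot(
    logfilesIndex="$AGILE/agilepy-test-data/test_dataset_6.0/LOG/LOG.index",
    tmin=456361778,
    tmax=456373279,
    src_x=129.7,
    src_y=3.7,
    ref="gal",
    zmax=60,
    step=10,
    writeFiles=True,
    computeHistogram=True,
    saveImage=True,
    fileFormat="png",
    title="Visibility Plot")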
Example #9
    def getConfiguration(confFilePath, userName, outputDir, verboselvl, ctsmap, scaletype, scalemin, scalemax, scalenum, methistsize, cclsizemin, cclsizemax, cclradmin, cclradmax, cclscalemin, cclscalemax):
        """Utility method to create a configuration file.

        Args:
            confFilePath (str): the path and filename of the configuration file that is going to be created.
            userName (str): the username of who is running the software.
            outputDir (str): the path to the output directory. The output directory will be created using the following format: 'userName_sourceName_todaydate'
            verboselvl (int): the verbosity level of the console output. Levels: 0 => critical, warning; 1 => critical, warning, info; 2 => critical, warning, info, debug.

            ctsmap (str): the path of the cts map generated by the AGAnalysis class

            scaletype (str):
            scalemin (float):
            scalemax (float):
            scalenum (float):
            methistsize (float):
            cclsizemin (float):
            cclsizemax (float):
            cclradmin (float):
            cclradmax (float):
            cclscalemin (float):
            cclscalemax (float):

        Returns:
            None
        """

        configuration = """
output:
  outdir: %s
  filenameprefix: wavelet_product
  logfilenameprefix: eng_log
  username: %s
  sourcename: wavelet
  verboselvl: %d

map:
  ctsmap: %s

wavelet:
  scaletype: %s
  scalemin: %f
  scalemax: %f
  scalenum: %f
  methistsize: %f
  cclsizemin: %f
  cclsizemax: %f
  cclradmin: %f
  cclradmax: %f
  cclscalemin: %f
  cclscalemax: %f



        """%(outputDir, userName, verboselvl, ctsmap, scaletype, scalemin, scalemax, scalenum, methistsize, cclsizemin, cclsizemax, cclradmin, cclradmax, cclscalemin, cclscalemax)

        with open(Utils._expandEnvVar(confFilePath), "w") as cf:

            cf.write(configuration)
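
A hedged usage sketch for the wavelet configuration writer. Every value below is an illustrative placeholder (not a library default), and the method is assumed to be callable as shown since it takes no self argument.

# Illustrative call: writes a wavelet analysis configuration to ./wavelet_conf.yaml.
getConfiguration(
    confFilePath="./wavelet_conf.yaml",
    userName="analysis_user",
    outputDir="$HOME/agilepy_wavelet_output",
    verboselvl=1,
    ctsmap="$HOME/agilepy_output/maps/testcase_EMIN00100_EMAX00300_01.cts.gz",  # placeholder
    scaletype="dyadic",      # placeholder string
    scalemin=1.0,
    scalemax=32.0,
    scalenum=6,
    methistsize=100,
    cclsizemin=-1,
    cclsizemax=-1,
    cclradmin=-1,
    cclradmax=-1,
    cclscalemin=-1,
    cclscalemax=-1)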
Example #10
    def loadSourcesFromCatalog(self,
                               catalogName,
                               rangeDist=(0, float("inf")),
                               show=False):

        supportedCatalogs = ["2AGL"]
        scaleFlux = False

        if catalogName == "2AGL":
            catPath = Utils._expandEnvVar("$AGILE/catalogs/2AGL.xml")
            if not Path(catPath).exists():
                catPath = Utils._expandEnvVar("$AGILE/catalogs/2AGL.multi")

            cat2Emin, cat2Emax = Parameters.getCat2EminEmax()
            uEmin = self.config.getOptionValue("emin_sources")
            uEmax = self.config.getOptionValue("emax_sources")

            if cat2Emin != uEmin or cat2Emax != uEmax:
                scaleFlux = True
                self.logger.info(
                    self,
                    f"The input energy range ({uEmin},{uEmax}) is different to the CAT2 energy range ({cat2Emin},{cat2Emax}). A scaling of the sources flux will be performed."
                )

        elif catalogName == "4FGL":

            scaleFlux = False
            raise FileNotFoundError(
                f"The catalog {catalogName} is going to be supported soon. Supported catalogs: {supportedCatalogs}"
            )

        else:
            self.logger.critical(
                self,
                "The catalog %s is not supported. Supported catalogs: %s",
                catalogName, ' '.join(supportedCatalogs))
            raise FileNotFoundError(
                f"The catalog {catalogName} is not supported. Supported catalogs: {supportedCatalogs}"
            )

        return self.loadSourcesFromFile(catPath,
                                        rangeDist,
                                        scaleFlux=scaleFlux,
                                        show=show)
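
A hedged usage sketch for the catalog loader above. The ag object is assumed to expose loadSourcesFromCatalog (in Agilepy this is typically an AGAnalysis instance), and the distance range is illustrative.

# Hypothetical usage: load the 2AGL sources within 10 degrees of the map center.
sources = ag.loadSourcesFromCatalog("2AGL", rangeDist=(0, 10), show=True)
print(len(sources))   # number of catalog sources actually added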
Example #11
    def _getRegionsFiles(self, regFiles, catalogRegions):

        regionsFiles = []
        if regFiles:
            for regionFile in regFiles:
                if regionFile:
                    regionFile = Utils._expandEnvVar(regionFile)
                    self.logger.info(self,
                                     "The region catalog %s will be loaded.",
                                     regionFile)

                regionsFiles.append(regionFile)

        regionsFilesDict = self.getSupportedRegionsCatalogs()
        if catalogRegions in regionsFilesDict.keys():
            catalogRegions = Utils._expandEnvVar(
                regionsFilesDict[catalogRegions])
            self.logger.info(self, "The region catalog %s will be loaded.",
                             catalogRegions)

        regionsFiles.append(catalogRegions)

        return regionsFiles
Example #12
    def _validateTimeInIndex(confDict):
        errors = {}

        if (confDict["input"]["userestapi"] == True):
            return errors

        (first,
         last) = Utils._getFirstAndLastLineInFile(confDict["input"]["evtfile"])

        idxTmin = Utils._extractTimes(first)[0]
        idxTmax = Utils._extractTimes(last)[1]

        userTmin = confDict["selection"]["tmin"]
        userTmax = confDict["selection"]["tmax"]
        timetype = confDict["selection"]["timetype"]

        if timetype == "MJD":
            userTmin = AstroUtils.time_mjd_to_agile_seconds(userTmin)
            userTmax = AstroUtils.time_mjd_to_agile_seconds(userTmax)

        if float(userTmin) < float(idxTmin):
            errors["input/tmin"]="tmin: {} is outside the time range of {} (tmin < indexTmin). Index file time range: [{}, {}]" \
                                  .format(userTmin, confDict["input"]["evtfile"], idxTmin, idxTmax)

        if float(userTmin) > float(idxTmax):
            errors["input/tmin"]="tmin: {} is outside the time range of {} (tmin > indexTmax). Index file time range: [{}, {}]" \
                                  .format(userTmin, confDict["input"]["evtfile"], idxTmin, idxTmax)

        if float(userTmax) > float(idxTmax):
            errors["input/tmax"]="tmax: {} is outside the time range of {} (tmax > indexTmax). Index file time range: [{}, {}]" \
                                  .format(userTmax, confDict["input"]["evtfile"], idxTmin, idxTmax)

        if float(userTmax) < float(idxTmin):
            errors["input/tmax"]="tmax: {} is outside the time range of {} (tmax < indexTmin). Index file time range: [{}, {}]" \
                                  .format(userTmax, confDict["input"]["evtfile"], idxTmin, idxTmax)

        return errors
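
A small sketch of the validator's fast path. The dictionary is illustrative and only exercises the branch that needs no index file; the function is assumed to be callable as shown since it takes no self argument.

# Illustrative confDict: when the REST API is used, no time-range check is performed.
confDict = {"input": {"userestapi": True}}
assert _validateTimeInIndex(confDict) == {}

# With userestapi False, the evtfile index is read and keys such as "input/tmin"
# or "input/tmax" are added to the returned dict when the requested interval
# falls outside the index time range.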
Example #13
    def convertCatalogToXml(self, catalogFilepath):

        catalogFilepath = Utils._expandEnvVar(catalogFilepath)

        filename, fileExtension = splitext(catalogFilepath)

        supportedFormats = [".multi", ".txt"]
        if fileExtension not in supportedFormats:
            raise SourceModelFormatNotSupported(
                "Format of {} not supported. Supported formats: {}".format(
                    catalogFilepath, ' '.join(supportedFormats)))

        newSources = self._loadFromSourcesTxt(catalogFilepath)

        return self.writeToFile(filename, fileformat="xml", sources=newSources)
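
A hedged usage sketch for the converter above. The ag object is illustrative, and the catalog path reuses the 2AGL .multi file referenced in Example #10.

# Hypothetical usage: convert the AGILE 2AGL catalog from .multi to .xml.
xmlFilePath = ag.convertCatalogToXml("$AGILE/catalogs/2AGL.multi")
print(xmlFilePath)   # path of the written XML source model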
    def test_compute_pointing_distances_from_source(self):

        # file = "/data/AGILE/LOG_INDEX/LOG.log.index"
        zmax = 60
        step = 10
        logfilesIndex = "$AGILE/agilepy-test-data/test_dataset_6.0/LOG/LOG.index"

        logfilesIndex = Utils._expandEnvVar(logfilesIndex)

        _, _, _, _, _, _, _, separationFile = self.ageng._computePointingDistancesFromSource(
            logfilesIndex,
            456361778,
            456373279,
            src_x=129.7,
            src_y=3.7,
            ref="gal",
            zmax=zmax,
            step=step,
            writeFiles=True)
Example #15
    def loadSourcesFromFile(self,
                            filePath,
                            rangeDist=(0, float("inf")),
                            scaleFlux=False,
                            show=False):

        filePath = Utils._expandEnvVar(filePath)

        _, fileExtension = splitext(filePath)

        supportFormats = [".txt", ".xml", ".multi"]

        if fileExtension not in supportFormats:

            raise SourceModelFormatNotSupported(
                "Format of {} not supported. Supported formats: {}".format(
                    filePath, ' '.join(supportFormats)))

        if fileExtension == ".xml":
            newSources = self._loadFromSourcesXml(filePath)

        elif fileExtension == ".txt" or fileExtension == ".multi":
            newSources = self._loadFromSourcesTxt(filePath)

        if newSources is None:
            self.logger.critical(self, "Errors during %s parsing (%s)",
                                 filePath, fileExtension)
            raise SourcesFileLoadingError(
                "Errors during {} parsing ({})".format(filePath,
                                                       fileExtension))

        mapCenterL = float(self.config.getOptionValue("glon"))
        mapCenterB = float(self.config.getOptionValue("glat"))

        for newSource in newSources:
            newSource.setDistanceFromMapCenter(mapCenterL, mapCenterB)

        filteredSources = [
            source for source in self._filterByDistance(newSources, rangeDist)
        ]

        filteredSources = [
            source for source in self._discardIfAlreadyExist(filteredSources)
        ]

        if scaleFlux:

            filteredSources = [
                source for source in self._scaleSourcesFlux(filteredSources)
            ]

        if show:
            for s in filteredSources:
                print(f"{s}")

        for source in filteredSources:

            self.sources.append(source)

        self.logger.info(self, "Loaded %d sources. Total sources: %d",
                         len(filteredSources), len(self.sources))

        return filteredSources
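
A hedged usage sketch for the loader above. The ag object and the file path are illustrative; the supported extensions (.txt, .xml, .multi) come from the code itself.

# Hypothetical usage: load a user-defined XML source model, keeping only the
# sources within 20 degrees of the map center and printing them while loading.
sources = ag.loadSourcesFromFile("./my_sources.xml", rangeDist=(0, 20), show=True)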
    def _computeSeparationPerFile(self, doTimeMask, logFile, tmin_start,
                                  tmax_start, skyCordsFK5, zmax, step):

        logFile = Utils._expandEnvVar(logFile)

        hdulist = fits.open(logFile)
        SC = hdulist[1].data
        self.logger.debug(self, "Total events: %f", len(SC["TIME"]))
        self.logger.debug(self, "tmin: %f", tmin_start)
        self.logger.debug(self, "tmin log file: %f", SC["TIME"][0])
        self.logger.debug(self, "tmax: %f", tmax_start)
        self.logger.debug(self, "tmax log file: %f", SC["TIME"][-1])

        self.logger.debug(self, "Do time mask? %d", doTimeMask)

        if doTimeMask:

            self.logger.debug(self, "How many times are >= tmin_start? %d",
                              np.sum(SC['TIME'] >= tmin_start))
            self.logger.debug(self, "How many times are <= tmax_start? %d",
                              np.sum(SC['TIME'] <= tmax_start))

            # Filtering out
            booleanMask = np.logical_and(SC['TIME'] >= tmin_start,
                                         SC['TIME'] <= tmax_start)
            TIME = SC['TIME'][booleanMask]
            ATTITUDE_RA_Y = SC['ATTITUDE_RA_Y'][booleanMask]
            ATTITUDE_DEC_Y = SC['ATTITUDE_DEC_Y'][booleanMask]
            self.logger.debug(
                self, "Time mask: %d values skipped" %
                (np.sum(np.logical_not(booleanMask))))

        else:
            TIME = SC['TIME']
            ATTITUDE_RA_Y = SC['ATTITUDE_RA_Y']
            ATTITUDE_DEC_Y = SC['ATTITUDE_DEC_Y']

        hdulist.close()

        # This is to avoid problems with moments for which the AGILE pointing was set to RA=NaN, DEC=NaN:
        # keep only the rows where both RA and DEC are valid, and apply the same mask to TIME
        booleanMaskRA = np.logical_not(np.isnan(ATTITUDE_RA_Y))
        booleanMaskDEC = np.logical_not(np.isnan(ATTITUDE_DEC_Y))

        booleanMaskRADEC = np.logical_and(booleanMaskRA, booleanMaskDEC)

        TIME = TIME[booleanMaskRADEC]
        ATTITUDE_RA_Y = ATTITUDE_RA_Y[booleanMaskRADEC]
        ATTITUDE_DEC_Y = ATTITUDE_DEC_Y[booleanMaskRADEC]

        self.logger.debug(
            self,
            "Not-null mask RA/DEC (RA or DEC was NaN): %d values skipped" %
            (np.sum(np.logical_not(booleanMaskRADEC))))

        deltatime = 0.1  # AGILE attitude is collected every 0.1 s

        # tmin = np.min(TIME)
        # tmax = np.max(TIME)

        index_ti = 0
        index_tf = len(TIME) - 1

        self.logger.debug(self, "Step is: %f", step)

        indexstep = int(step * 10)  # if step is 0.1, indexstep=1 => all values;
                                    # if step is 1, indexstep=10 => one value every 10

        self.logger.debug(self, "indexstep is: %f", indexstep)

        # creating arrays filled with zeros
        src_raz = np.zeros(len(TIME[index_ti:index_tf:indexstep]))
        src_decz = np.zeros(len(TIME[index_ti:index_tf:indexstep]))

        self.logger.debug(self, "Number of separations to be computed: %f",
                          index_tf / indexstep)

        # filling the just created arrays with our coordinates of interest
        src_ra = src_raz + skyCordsFK5.ra
        src_dec = src_decz + skyCordsFK5.dec

        c1 = SkyCoord(src_ra, src_dec, unit='deg', frame='icrs')
        c2 = SkyCoord(ATTITUDE_RA_Y[index_ti:index_tf:indexstep],
                      ATTITUDE_DEC_Y[index_ti:index_tf:indexstep],
                      unit='deg',
                      frame='icrs')
        #        print 'c1=', len(c1), 'c2=', len(c2) # to ensure c1 and c2 have the same length
        sep = c2.separation(c1)

        self.logger.debug(self,
                          "Number of computed separation: %f" % (len(sep)))

        return np.asfarray(sep), TIME[index_ti:index_tf:indexstep], TIME[
            index_ti:index_tf:indexstep] + deltatime
Example #17
    def _expandFileEnvVars(confDict, filetype):
        confDict["input"][filetype] = Utils._expandEnvVar(
            confDict["input"][filetype])
Example #18
    def _expandOutdirEnvVars(confDict):
        confDict["output"]["outdir"] = Utils._expandEnvVar(
            confDict["output"]["outdir"])