def test_display_sky_map(self):

    pu = PlottingUtils(self.config, self.agilepyLogger)

    smooth = 4
    fileFormat = ".png"
    title = "testcase"
    cmap = "CMRmap"
    regFiles = [
        Utils._expandEnvVar("$AGILE/catalogs/2AGL_2.reg"),
        Utils._expandEnvVar("$AGILE/catalogs/2AGL_2.reg")
    ]
    regFileColors = ["yellow", "blue"]

    file = pu.displaySkyMap(
        self.datadir + "/testcase_EMIN00100_EMAX00300_01.cts.gz",
        smooth=smooth,
        fileFormat=fileFormat,
        title=title,
        cmap=cmap,
        regFiles=regFiles,
        regFileColors=regFileColors,
        catalogRegions="2AGL",
        catalogRegionsColor="red",
        saveImage=True,
        normType="linear")

    assert os.path.isfile(file)
def visibilityPlot(self, time_windows, ra, dec, fermi_datapath, agile_datapath, run, zmax, mode="all", step=1):
    """It runs the offaxis tools and creates a directory containing the result files.

    Args:
        time_windows (2d float Array): the tstart-tstop intervals to process, structured as a 2d array (e.g. [[t1, t2], [t3, t4], ..., [tn-1, tn]])
        ra (float): ra value
        dec (float): dec value
        fermi_datapath (str): Fermi log file path
        agile_datapath (str): AGILE log file path
        run (integer): run number
        zmax (float): maximum offaxis degrees
        mode (str): "agile" | "fermi" | "all". Select "all" to plot both datasets, otherwise only the AGILE or Fermi data is plotted
        step (float): step value for plotting

    Returns:
        dir (str): a new directory containing the results
    """
    self.outdir = self.config.getOptionValue("outdir")

    agile_datapath = Utils._expandEnvVar(agile_datapath)
    fermi_datapath = Utils._expandEnvVar(fermi_datapath)

    offaxis = Create_offaxis_plot(time_windows, ra, dec, fermi_datapath, agile_datapath, run, zmax, mode, step, outdir=self.outdir, logger=self.logger)

    dir = offaxis.run()

    self.logger.info(self, "Output directory: %s", dir)

    return dir
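# Usage sketch (illustrative only, not part of the library): a minimal call to the
# off-axis visibility comparison above. `eng` stands for an instance of the enclosing
# class, and the log file paths below are placeholders.
#
#   time_windows = [[456361778, 456373279], [456373279, 456384780]]   # [tstart, tstop] pairs
#   out_dir = eng.visibilityPlot(time_windows, ra=129.7, dec=3.7,
#                                fermi_datapath="$FERMI/fermi_spacecraft.fits",
#                                agile_datapath="$AGILE/agile_log.index",
#                                run=1, zmax=60, mode="all", step=1)
#   # out_dir is the directory containing the result files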
def __init__(self, configurationFilePath):

    self.config = AgilepyConfig()

    # load only the "input" and "output" sections
    self.config.loadBaseConfigurations(Utils._expandEnvVar(configurationFilePath))

    # create the output directory
    self.outdir = self.config.getConf("output", "outdir")
    Path(self.outdir).mkdir(parents=True, exist_ok=True)

    self.logger = AgilepyLogger()
    self.logger.initialize(
        self.outdir,
        self.config.getConf("output", "logfilenameprefix"),
        self.config.getConf("output", "verboselvl"))

    self.plottingUtils = PlottingUtils(self.config, self.logger)

    if "AGILE" not in os.environ:
        raise AGILENotFoundError("$AGILE is not set.")

    if "PFILES" not in os.environ:
        raise PFILESNotFoundError("$PFILES is not set.")
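# Usage sketch (illustrative only): instantiating the class above. "EnclosingClass" is a
# placeholder name, the configuration file path is a placeholder, and $AGILE and $PFILES
# must be set in the environment.
#
#   eng = EnclosingClass("$HOME/agilepy_conf.yaml")
#   # the output directory and the logger are now initialized from the "output" section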
def getConfiguration(confFilePath, userName, outputDir, verboselvl):
    """Utility method to create a configuration file.

    Args:
        confFilePath (str): the path and filename of the configuration file that is going to be created.
        userName (str): the username of who is running the software.
        outputDir (str): the path to the output directory. The output directory will be created using the following format: 'userName_sourceName_todaydate'
        verboselvl (int): the verbosity level of the console output. Message types: level 0 => critical, warning, level 1 => critical, warning, info, level 2 => critical, warning, info, debug

    Returns:
        None
    """
    configuration = """
output:
  outdir: %s
  filenameprefix: eng_product
  logfilenameprefix: eng_log
  username: %s
  sourcename: agilefermioffaxis
  verboselvl: %d
""" % (outputDir, userName, verboselvl)

    with open(Utils._expandEnvVar(confFilePath), "w") as cf:
        cf.write(configuration)
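# Usage sketch (illustrative only): writing a minimal configuration file with the utility
# above. The file path, user name and output directory are placeholders.
#
#   getConfiguration("./eng_conf.yaml", userName="analyst_name",
#                    outputDir="$HOME/eng_output", verboselvl=1)
#   # the resulting YAML file contains only the "output" section shown in the template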
def visibilityPlot(self, logfilesIndex, tmin, tmax, src_x, src_y, ref, zmax=60, step=1, writeFiles=True, computeHistogram=True, saveImage=True, fileFormat="png", title="Visibility Plot"):
    """It computes the angular separations between the center of the AGILE GRID field of view and the sky position given by src_x and src_y.

    Args:
        logfilesIndex (str): the index file for the log files.
        tmin (float): lower observation time limit to analyze.
        tmax (float): upper observation time limit to analyze.
        src_x (float): source position x (unit: degrees)
        src_y (float): source position y (unit: degrees)
        ref (str): the reference frame of src_x and src_y (e.g. "gal").
        zmax (float): maximum zenith distance of the source from the center of the detector (unit: degrees)
        step (float): time interval in seconds between 2 consecutive points in the resulting plot. Minimum accepted value: 0.1 s.
        writeFiles (bool): if True, two text files with the separations data will be written on disk.
        computeHistogram (bool): if True, the visibility histogram is also computed.
        saveImage (bool): if True, the image will be saved on disk.
        fileFormat (str): the output format of the image.
        title (str): the plot title.

    Returns:
        vis_plot: the visibility plot.
        hist_plot: the visibility histogram, or None if computeHistogram is False.
    """
    logfilesIndex = Utils._expandEnvVar(logfilesIndex)

    separations, ti_tt, tf_tt, ti_mjd, tf_mjd, src_ra, src_dec, sepFile = self._computePointingDistancesFromSource(
        logfilesIndex, tmin, tmax, src_x, src_y, ref, zmax, step, writeFiles)

    vis_plot = self.plottingUtils.visibilityPlot(separations, ti_tt, tf_tt, ti_mjd, tf_mjd, src_ra, src_dec, zmax, step, saveImage, self.outdir, fileFormat, title)

    hist_plot = None
    if computeHistogram:
        hist_plot = self.plottingUtils.visibilityHisto(separations, ti_tt, tf_tt, src_ra, src_dec, zmax, step, saveImage, self.outdir, fileFormat, title)

    return vis_plot, hist_plot
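# Usage sketch (illustrative only): computing the visibility plot and histogram for a
# source in galactic coordinates. `eng` stands for an instance of the enclosing class;
# the log index path and time interval are taken from the test case below.
#
#   vis_plot, hist_plot = eng.visibilityPlot(
#       "$AGILE/agilepy-test-data/test_dataset_6.0/LOG/LOG.index",
#       tmin=456361778, tmax=456373279,
#       src_x=129.7, src_y=3.7, ref="gal",
#       zmax=60, step=10, saveImage=True, fileFormat="png")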
def getConfiguration(confFilePath, userName, outputDir, verboselvl, ctsmap, scaletype, scalemin, scalemax, scalenum, methistsize, cclsizemin, cclsizemax, cclradmin, cclradmax, cclscalemin, cclscalemax):
    """Utility method to create a configuration file.

    Args:
        confFilePath (str): the path and filename of the configuration file that is going to be created.
        userName (str): the username of who is running the software.
        outputDir (str): the path to the output directory. The output directory will be created using the following format: 'userName_sourceName_todaydate'
        verboselvl (int): the verbosity level of the console output. Message types: level 0 => critical, warning, level 1 => critical, warning, info, level 2 => critical, warning, info, debug
        ctsmap (str): the path of the cts map generated by the AGAnalysis class
        scaletype (str):
        scalemin (float):
        scalemax (float):
        scalenum (float):
        methistsize (float):
        cclsizemin (float):
        cclsizemax (float):
        cclradmin (float):
        cclradmax (float):
        cclscalemin (float):
        cclscalemax (float):

    Returns:
        None
    """
    configuration = """
output:
  outdir: %s
  filenameprefix: wavelet_product
  logfilenameprefix: eng_log
  username: %s
  sourcename: wavelet
  verboselvl: %d

map:
  ctsmap: %s

wavelet:
  scaletype: %s
  scalemin: %f
  scalemax: %f
  scalenum: %f
  methistsize: %f
  cclsizemin: %f
  cclsizemax: %f
  cclradmin: %f
  cclradmax: %f
  cclscalemin: %f
  cclscalemax: %f
""" % (outputDir, userName, verboselvl, ctsmap, scaletype, scalemin, scalemax, scalenum, methistsize, cclsizemin, cclsizemax, cclradmin, cclradmax, cclscalemin, cclscalemax)

    with open(Utils._expandEnvVar(confFilePath), "w") as cf:
        cf.write(configuration)
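# Usage sketch (illustrative only): every wavelet parameter is passed explicitly and ends
# up in the "wavelet" section of the generated YAML. All paths and numeric values below
# are placeholders, not validated defaults.
#
#   getConfiguration("./wavelet_conf.yaml", userName="analyst_name",
#                    outputDir="$HOME/wavelet_output", verboselvl=1,
#                    ctsmap="$HOME/maps/testcase.cts.gz", scaletype="dyadic",
#                    scalemin=1.0, scalemax=8.0, scalenum=4, methistsize=100,
#                    cclsizemin=-1, cclsizemax=-1, cclradmin=-1, cclradmax=-1,
#                    cclscalemin=-1, cclscalemax=-1)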
def loadSourcesFromCatalog(self, catalogName, rangeDist=(0, float("inf")), show=False):

    supportedCatalogs = ["2AGL"]

    scaleFlux = False

    if catalogName == "2AGL":

        catPath = Utils._expandEnvVar("$AGILE/catalogs/2AGL.xml")
        if not Path(catPath).exists():
            catPath = Utils._expandEnvVar("$AGILE/catalogs/2AGL.multi")

        cat2Emin, cat2Emax = Parameters.getCat2EminEmax()

        uEmin = self.config.getOptionValue("emin_sources")
        uEmax = self.config.getOptionValue("emax_sources")

        if cat2Emin != uEmin or cat2Emax != uEmax:
            scaleFlux = True
            self.logger.info(
                self,
                f"The input energy range ({uEmin},{uEmax}) differs from the CAT2 energy range ({cat2Emin},{cat2Emax}): the flux of the sources will be scaled."
            )

    elif catalogName == "4FGL":
        raise FileNotFoundError(
            f"The catalog {catalogName} is going to be supported soon. Supported catalogs: {supportedCatalogs}")

    else:
        self.logger.critical(self, "The catalog %s is not supported. Supported catalogs: %s", catalogName, ' '.join(supportedCatalogs))
        raise FileNotFoundError(
            f"The catalog {catalogName} is not supported. Supported catalogs: {supportedCatalogs}")

    return self.loadSourcesFromFile(catPath, rangeDist, scaleFlux=scaleFlux, show=show)
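# Usage sketch (illustrative only): loading the 2AGL catalog sources within 25 degrees of
# the map center. `sl` stands for the object exposing the method above; the distance cut
# is an arbitrary example value.
#
#   cat_sources = sl.loadSourcesFromCatalog("2AGL", rangeDist=(0, 25), show=True)
#   # if the analysis energy range differs from the CAT2 one, the fluxes are scaled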
def _getRegionsFiles(self, regFiles, catalogRegions):

    regionsFiles = []

    if regFiles:
        for regionFile in regFiles:
            if regionFile:
                regionFile = Utils._expandEnvVar(regionFile)
                self.logger.info(self, "The region catalog %s will be loaded.", regionFile)
            regionsFiles.append(regionFile)

    regionsFilesDict = self.getSupportedRegionsCatalogs()
    if catalogRegions in regionsFilesDict.keys():
        catalogRegions = Utils._expandEnvVar(regionsFilesDict[catalogRegions])
        self.logger.info(self, "The region catalog %s will be loaded.", catalogRegions)
    regionsFiles.append(catalogRegions)

    return regionsFiles
def convertCatalogToXml(self, catalogFilepath):

    catalogFilepath = Utils._expandEnvVar(catalogFilepath)

    filename, fileExtension = splitext(catalogFilepath)

    supportedFormats = [".multi", ".txt"]
    if fileExtension not in supportedFormats:
        raise SourceModelFormatNotSupported(
            "Format of {} not supported. Supported formats: {}".format(catalogFilepath, ' '.join(supportedFormats)))

    newSources = self._loadFromSourcesTxt(catalogFilepath)

    return self.writeToFile(filename, fileformat="xml", sources=newSources)
def test_compute_pointing_distances_from_source(self):

    # file = "/data/AGILE/LOG_INDEX/LOG.log.index"
    zmax = 60
    step = 10
    logfilesIndex = "$AGILE/agilepy-test-data/test_dataset_6.0/LOG/LOG.index"
    logfilesIndex = Utils._expandEnvVar(logfilesIndex)

    _, _, _, _, _, _, _, separationFile = self.ageng._computePointingDistancesFromSource(
        logfilesIndex, 456361778, 456373279, src_x=129.7, src_y=3.7, ref="gal",
        zmax=zmax, step=step, writeFiles=True)
def _expandFileEnvVars(confDict, filetype):
    confDict["input"][filetype] = Utils._expandEnvVar(confDict["input"][filetype])
def _expandOutdirEnvVars(confDict):
    confDict["output"]["outdir"] = Utils._expandEnvVar(confDict["output"]["outdir"])
def _computeSeparationPerFile(self, doTimeMask, logFile, tmin_start, tmax_start, skyCordsFK5, zmax, step):

    logFile = Utils._expandEnvVar(logFile)

    hdulist = fits.open(logFile)
    SC = hdulist[1].data

    self.logger.debug(self, "Total events: %f", len(SC["TIME"]))
    self.logger.debug(self, "tmin: %f", tmin_start)
    self.logger.debug(self, "tmin log file: %f", SC["TIME"][0])
    self.logger.debug(self, "tmax: %f", tmax_start)
    self.logger.debug(self, "tmax log file: %f", SC["TIME"][-1])
    self.logger.debug(self, "Do time mask? %d", doTimeMask)

    if doTimeMask:

        self.logger.debug(self, "How many times are >= tmin_start? %d", np.sum(SC['TIME'] >= tmin_start))
        self.logger.debug(self, "How many times are <= tmax_start? %d", np.sum(SC['TIME'] <= tmax_start))

        # Filter the attitude records to the requested time interval
        booleanMask = np.logical_and(SC['TIME'] >= tmin_start, SC['TIME'] <= tmax_start)
        TIME = SC['TIME'][booleanMask]
        ATTITUDE_RA_Y = SC['ATTITUDE_RA_Y'][booleanMask]
        ATTITUDE_DEC_Y = SC['ATTITUDE_DEC_Y'][booleanMask]

        self.logger.debug(self, "Time mask: %d values skipped" % (np.sum(np.logical_not(booleanMask))))

    else:
        TIME = SC['TIME']
        ATTITUDE_RA_Y = SC['ATTITUDE_RA_Y']
        ATTITUDE_DEC_Y = SC['ATTITUDE_DEC_Y']

    hdulist.close()

    # Discard the moments for which the AGILE pointing was set to RA=NaN, DEC=NaN.
    # The same mask is applied to TIME, RA and DEC to keep the arrays aligned.
    booleanMaskRA = np.logical_not(np.isnan(ATTITUDE_RA_Y))
    booleanMaskDEC = np.logical_not(np.isnan(ATTITUDE_DEC_Y))
    booleanMaskRADEC = np.logical_and(booleanMaskRA, booleanMaskDEC)

    TIME = TIME[booleanMaskRADEC]
    ATTITUDE_RA_Y = ATTITUDE_RA_Y[booleanMaskRADEC]
    ATTITUDE_DEC_Y = ATTITUDE_DEC_Y[booleanMaskRADEC]

    self.logger.debug(self, "Not-null mask RA/DEC (at least one NULL): %d values skipped" % (np.sum(np.logical_not(booleanMaskRADEC))))

    deltatime = 0.1  # AGILE attitude is collected every 0.1 s

    index_ti = 0
    index_tf = len(TIME) - 1

    self.logger.debug(self, "Step is: %f", step)

    # if step=0.1, indexstep=1  => all values are kept
    # if step=1,   indexstep=10 => one value out of every 10
    indexstep = int(step * 10)

    self.logger.debug(self, "indexstep is: %f", indexstep)

    # create arrays filled with zeros
    src_raz = np.zeros(len(TIME[index_ti:index_tf:indexstep]))
    src_decz = np.zeros(len(TIME[index_ti:index_tf:indexstep]))

    self.logger.debug(self, "Number of separations to be computed: %f", index_tf / indexstep)

    # fill the arrays with the coordinates of interest
    src_ra = src_raz + skyCordsFK5.ra
    src_dec = src_decz + skyCordsFK5.dec

    c1 = SkyCoord(src_ra, src_dec, unit='deg', frame='icrs')
    c2 = SkyCoord(ATTITUDE_RA_Y[index_ti:index_tf:indexstep], ATTITUDE_DEC_Y[index_ti:index_tf:indexstep], unit='deg', frame='icrs')

    # c1 and c2 have the same length, so the separation is computed element-wise
    sep = c2.separation(c1)

    self.logger.debug(self, "Number of computed separation: %f" % (len(sep)))

    return np.asfarray(sep), TIME[index_ti:index_tf:indexstep], TIME[index_ti:index_tf:indexstep] + deltatime
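# Worked mini-example (illustrative only) of the sub-sampling and NaN masking above: the
# attitude is sampled every 0.1 s, so step=1 gives indexstep = int(1 * 10) = 10, i.e. one
# attitude record out of every 10, while step=0.1 keeps every record. The NaN mask keeps
# only records where both ATTITUDE_RA_Y and ATTITUDE_DEC_Y are valid, so TIME, RA and DEC
# stay aligned, e.g.:
#
#   ra  = np.array([10.0, np.nan, 11.0])
#   dec = np.array([5.0, 5.1, np.nan])
#   np.logical_and(~np.isnan(ra), ~np.isnan(dec))   # -> [True, False, False]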
def loadSourcesFromFile(self, filePath, rangeDist=(0, float("inf")), scaleFlux=False, show=False):

    filePath = Utils._expandEnvVar(filePath)

    _, fileExtension = splitext(filePath)

    supportFormats = [".txt", ".xml", ".multi"]
    if fileExtension not in supportFormats:
        raise SourceModelFormatNotSupported(
            "Format of {} not supported. Supported formats: {}".format(filePath, ' '.join(supportFormats)))

    if fileExtension == ".xml":
        newSources = self._loadFromSourcesXml(filePath)
    elif fileExtension == ".txt" or fileExtension == ".multi":
        newSources = self._loadFromSourcesTxt(filePath)

    if newSources is None:
        self.logger.critical(self, "Errors during %s parsing (%s)", filePath, fileExtension)
        raise SourcesFileLoadingError("Errors during {} parsing ({})".format(filePath, fileExtension))

    mapCenterL = float(self.config.getOptionValue("glon"))
    mapCenterB = float(self.config.getOptionValue("glat"))

    for newSource in newSources:
        newSource.setDistanceFromMapCenter(mapCenterL, mapCenterB)

    filteredSources = [source for source in self._filterByDistance(newSources, rangeDist)]

    filteredSources = [source for source in self._discardIfAlreadyExist(filteredSources)]

    if scaleFlux:
        filteredSources = [source for source in self._scaleSourcesFlux(filteredSources)]

    if show:
        for s in filteredSources:
            print(f"{s}")

    for source in filteredSources:
        self.sources.append(source)

    self.logger.info(self, "Loaded %d sources. Total sources: %d", len(filteredSources), len(self.sources))

    return filteredSources
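# Usage sketch (illustrative only): loading user-defined sources from an XML source
# library file. `sl` stands for the object exposing the method above and the file path
# is a placeholder.
#
#   my_sources = sl.loadSourcesFromFile("$HOME/sources.xml", rangeDist=(0, 10), show=True)
#   # returns only the sources within 10 degrees of the map center that were not already loaded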