def runTSMap(self, row=-1, column=-1):
    """ Run a TS map using the configuration file given"""
    folder = self.config['out']
    utils.mkdir_p(self.tsfolder)
    # This part is used to rerun either a row or a pixel.
    if row > 0:  # rerun only 1 row
        ra = self.RAref + self.binsz * (row - self.npix / 2.)
        if column > 0:  # rerun only 1 pixel
            dec = self.DECref + self.binsz * (column - self.npix / 2.)
            self.info('Run Pixel evaluation at ' + str(ra) + ' ' + str(dec))
            self._launch(ra, dec, row, column)
        else:
            self.info('Run Row evaluation at ' + str(ra))
            self._launch(ra, 0, row, 0)
        return

    # Normal operation: all rows and pixels are computed
    for i in xrange(self.npix):  # loop over the X axis
        ra = self.RAref + self.binsz * (i - self.npix / 2.)
        if self.config['TSMap']['method'] == 'row':  # a row is evaluated in one job
            # if row<0 or i==row:
            self.info('Run Row evaluation at ' + str(ra))
            self._launch(ra, 0, i, 0)
        else:  # each pixel is evaluated by one job
            for j in xrange(self.npix):  # loop over the Y axis
                # if (row<0 and column<0) or (i==row and column<0) or (i==row and j==column):
                dec = self.DECref + self.binsz * (j - self.npix / 2.)
                self.info('Run Pixel evaluation at ' + str(ra) + ' ' + str(dec))
                self._launch(ra, dec, i, j)
def XmlMaker(config):
    folder = config['out']
    utils.mkdir_p(folder)
    # test if the user provides a catalog or not.
    # if not, use the default one
    if config['environ']['FERMI_CATALOG_DIR'] == '':
        catalogDir = env.CATALOG_DIR
        print "use the default location of the catalog"
    else:
        catalogDir = config['environ']['FERMI_CATALOG_DIR']

    if config['environ']['FERMI_CATALOG'] == '':
        catalog = catalogDir + "/" + env.CATALOG
        print "use the default catalog"
    else:
        catalog = catalogDir + "/" + config['environ']['FERMI_CATALOG']

    print "Use the catalog : ", catalog
    print "Use the extended directory : ", CATALOG_TEMPLATE_DIR

    lib, doc = CreateLib()
    srclist = GetlistFromFits(config, catalog)
    WriteXml(lib, doc, srclist, config)
    Xml_to_Reg(folder + "/Roi_model", srclist, Prog=sys.argv[0])
def __init__(self, config, parent_filename=""):
    super(BayesianBlocks, self).__init__(config, parent_filename)
    self.LCfolder = self.folder + "/BayesianBlocks/"
    utils.mkdir_p(self.LCfolder)

    # Convert time if necessary
    if self.config['time']['type'] == 'MJD':
        self.config['time']['tmin'] = utils.MJD_to_met(self.config['time']['tmin'])
        self.config['time']['tmax'] = utils.MJD_to_met(self.config['time']['tmax'])
        self.config['time']['type'] = 'MET'  # assignment, not comparison
    elif self.config['time']['type'] == 'JD':
        self.config['time']['tmin'] = utils.JD_to_met(self.config['time']['tmin'])
        self.config['time']['tmax'] = utils.JD_to_met(self.config['time']['tmax'])
        self.config['time']['type'] = 'MET'  # assignment, not comparison

    self.tmin = self.config['time']['tmin']
    self.tmax = self.config['time']['tmax']
    self.p0 = self.config['BayesianBlocks']['p0']

    self.config['Spectrum']['FitsGeneration'] = self.config['BayesianBlocks']['FitsGeneration']
    self.config['Spectrum']['FrozenSpectralIndex'] = self.config['BayesianBlocks']['SpectralIndex']
    self.config['UpperLimit']['TSlimit'] = self.config['BayesianBlocks']['TSLightCurve']

    # Check that the aperture light curve has been run first
    self._CheckAppertureLightCurveFile()
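# The conversions above rely on utils.MJD_to_met and utils.JD_to_met, which are not
# shown in this section. A minimal sketch of the assumed conversion follows: Fermi MET
# counts seconds since 2001-01-01 00:00:00 (MJD 51910), and MJD = JD - 2400000.5.
# The real enrico helpers may differ in details such as leap-second handling; the
# function names below are illustrative stand-ins, not part of enrico.
MET_REF_MJD = 51910.0  # assumed reference epoch, 2001-01-01

def mjd_to_met_sketch(mjd):
    """Hypothetical stand-in for utils.MJD_to_met (no leap-second correction)."""
    return (mjd - MET_REF_MJD) * 86400.0

def jd_to_met_sketch(jd):
    """Hypothetical stand-in for utils.JD_to_met."""
    return mjd_to_met_sketch(jd - 2400000.5)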
def AppLC(infile):
    '''Main function of the aperture photometry light-curve script.
    Read the config file and run the analysis'''
    ROOT.gROOT.SetBatch(ROOT.kTRUE)  # Batch mode

    enricodir = environ.DIRS.get('ENRICO_DIR')
    fermidir = environ.DIRS.get('FERMI_DIR')
    config = get_config(infile)

    folder = config['out']
    # Create a subfolder named LightCurve
    LCoutfolder = folder + "/" + AppLCPath
    utils.mkdir_p(LCoutfolder)

    # Change the ROI to the desired radius in degree (defaults to the legacy 1 deg)
    try:
        config['space']['rad'] = config['AppLC']['rad']
    except (NameError, KeyError):  # a missing config key raises KeyError
        config['space']['rad'] = 1

    Nbins = config['AppLC']['NLCbin']  # Number of bins
    # Get the time bin
    dt = (config['time']['tmax'] - config['time']['tmin']) / Nbins  # sec

    Obs = Observation(LCoutfolder, config, tag="")
    if config['AppLC']["FitsGeneration"] == "yes":
        _log('gtselect', 'Select data from library')  # run gtselect
        Obs.FirstCut()
        Obs.SelectEvents()
        _log('gtmktime', 'Update the GTI and cut data based on ROI')  # run gtmktime
        Obs.MkTime()

        # Binning from data or using a fixed bin size
        if config['AppLC']['binsFromData'] == "no":
            _log('gtbin', 'bin the data into a light-curve using fixed time bins')  # run gtbin
            print "Use a dt of %2.2e seconds" % (dt)
            Obs.GtLCbin(dt=dt)
        else:
            spfile = fits.open(Obs.eventfile)
            diff = spfile[1].data.field(9)[1:-1] - spfile[1].data.field(9)[:-2]
            # Compute the delta T as the minimum delta t between 2 events, divided by 2
            dt = np.min(diff) / 2.
            timefile = LCoutfolder + "/Timebin.txt"
            MakeTimebinFile(Obs, timefile)
            _log('gtbindef', 'define the bins')  # run gtbindef
            Obs.GtBinDef(timefile)
            _log('gtbin', 'bin the data into a light-curve using bins based on data')  # run gtbin
            Obs.GtLCbin(dt=0)

        _log('gtexposure', 'compute the exposure')  # run gtexposure
        Obs.GtExposure()

    # Get some useful values here. This allows PlotAppLC to be called independently
    Nbins = config['AppLC']['NLCbin']  # Number of bins

    # Plot the results and dump them into ascii files
    PlotAppLC(Nbins, LCoutfolder, Obs.lcfile)
def __init__(self, config):
    super(ModelTester, self).__init__()
    Loggin.Message.__init__(self)
    self.config = config
    self.folder = self.config['out']
    utils.mkdir_p(self.folder + "/TestModel")
    self.modellist = ["PowerLaw", "LogParabola", "PLSuperExpCutoff"]
    '''
    try:
        with open(self.folder+"/TestModel/Fit.pickle","r") as pfile:
            print("Retrieving previous Fit from %s" \
                %(self.folder+"/TestModel/Fit.pickle"))
            self.FitRunner = pickle.load(pfile)
            self.Fit = self.FitRunner.CreateLikeObject()
    except:
        self._GenFit()
        self.FitRunner.PerformFit(self.Fit, False)
        with open(self.folder+"/TestModel/Fit.pickle","w") as pfile:
            print("Saving current Fit to %s" \
                %(self.folder+"/TestModel/Fit.pickle"))
            pickle.dump(self.FitRunner,pfile)
    '''
    self._GenFit()
    self.FitRunner.PerformFit(self.Fit, False)
    # Store the results in a dictionary
    self.Results = {}
def Scan(config):
    ROOT.gROOT.SetBatch(ROOT.kTRUE)
    cres = ROOT.TCanvas("Scan")
    config["Spectrum"]["FitsGeneration"] = "no"
    FitRunner, Fit = RunGTlike.GenAnalysisObjects(config)
    spectrum = Fit[FitRunner.obs.srcname].funcs['Spectrum']
    ParName = spectrum.paramNames

    Fit.fit(0, covar=False, optimizer=config['fitting']['optimizer'])

    for par in ParName:  # Loop over the parameters and get value, error and scale
        if spectrum.getParam(par).isFree():
            print "Scan for parameter ", par
            ParValue = spectrum.getParam(par).value()
            ParError = spectrum.getParam(par).error()
            bmin, bmax = spectrum.getParam(par).getBounds()
            bmin = max(bmin, ParValue - 10 * ParError)
            bmax = min(bmax, ParValue + 10 * ParError)

            param, loglike = MakeScan(Fit, spectrum, par, bmin, bmax,
                                      config['fitting']['optimizer'])

            # restore best-fit parameters
            spectrum.getParam(par).setFree(1)
            ParValue = spectrum.getParam(par).setValue(ParValue)

            plt.figure()
            plt.plot(param, loglike, "-r")
            plt.title(par)
            plt.xlabel("Parameter: " + par)
            plt.ylabel("Log(Like)")

            utils.mkdir_p(config["out"] + "/" + cst.ScanPath)
            savefile = open(config["out"] + "/" + cst.ScanPath + "/Scan_" + par + ".txt", "w")
            for i in xrange(param.size):
                savefile.write(str(param[i]) + " " + str(loglike[i]) + "\n")
            savefile.close()

            plt.savefig(config["out"] + "/" + cst.ScanPath + "/Scan_" + par + ".png",
                        dpi=150, facecolor='w', edgecolor='w',
                        orientation='portrait', papertype=None, format=None,
                        transparent=False, bbox_inches=None, pad_inches=0.1,
                        frameon=None)
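# MakeScan is called above (and in Contour below) but is not defined in this section.
# The following is a hypothetical reconstruction of what it presumably does: freeze the
# chosen parameter, step it over [bmin, bmax], refit the remaining free parameters at
# each step and record the objective value. The signature is inferred from the call
# sites; the real enrico implementation may differ.
import numpy as np

def MakeScan_sketch(Fit, spectrum, par, bmin, bmax, optimizer, N=100):
    """Profile-likelihood scan of one spectral parameter (illustrative only)."""
    param = np.linspace(bmin, bmax, N)
    loglike = np.zeros(N)
    spectrum.getParam(par).setFree(0)  # freeze the scanned parameter
    for i in xrange(N):
        spectrum.getParam(par).setValue(param[i])
        # In the standard pyLikelihood interface, fit() returns the optimum of the
        # objective function, i.e. -log(likelihood)
        loglike[i] = Fit.fit(0, covar=False, optimizer=optimizer)
    return param, loglike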
def plot_sed_fromconfig(config, ignore_missing_bins=False):
    utils.mkdir_p(config["out"] + "/Spectrum")
    srcname = config['target']['name']
    Emin = config['energy']['emin']
    Emax = config['energy']['emax']
    filename = utils._SpecFileName(config)

    Param = Params(srcname, Emin=Emin, Emax=Emax, PlotName=filename)
    Result = utils.ReadResult(config)

    # if the TS > TS limit, plot the butterfly; if not, draw an UL
    if Result["TS"] > config['UpperLimit']['TSlimit']:
        PlotSED(config, Param, ignore_missing_bins)
    else:
        try:
            PlotUL(Param, config, Result['Ulvalue'],
                   config['UpperLimit']['SpectralIndex'])
        except:
            print "Not able to plot an upper limit in a SED diagram. UL computed?"
def XmlMaker(config):
    folder = config['out']
    utils.mkdir_p(folder)
    # test if the user provides a catalog or not.
    # if not, use the default one
    if config['environ']['FERMI_CATALOG_DIR'] == '':
        catalogDir = env.CATALOG_DIR
        print "use the default location of the catalog"
    else:
        catalogDir = config['environ']['FERMI_CATALOG_DIR']

    if config['environ']['FERMI_CATALOG'] == '':
        catalog = catalogDir + "/" + env.CATALOG
        print "use the default catalog"
    else:
        catalog = catalogDir + "/" + config['environ']['FERMI_CATALOG']

    print "Use the catalog : ", catalog

    lib, doc = CreateLib()
    srclist = GetlistFromFits(config, catalog)

    # deal with the summed-likelihood analysis
    xml = config["file"]["xml"]
    if config['ComponentAnalysis']['FrontBack'] == "yes":
        config["event"]["evtype"] = 1
        config["file"]["xml"] = xml.replace(".xml", "_FRONT.xml")
        WriteXml(lib, doc, srclist, config)

        lib, doc = CreateLib()
        config["event"]["evtype"] = 2
        config["file"]["xml"] = xml.replace(".xml", "_BACK.xml")
        WriteXml(lib, doc, srclist, config)

    elif config['ComponentAnalysis']['PSF'] == "yes":
        config["event"]["evtype"] = 4
        config["file"]["xml"] = xml.replace(".xml", "_PSF0.xml")
        WriteXml(lib, doc, srclist, config)

        lib, doc = CreateLib()
        config["event"]["evtype"] = 8
        config["file"]["xml"] = xml.replace(".xml", "_PSF1.xml")
        WriteXml(lib, doc, srclist, config)

        lib, doc = CreateLib()
        config["event"]["evtype"] = 16
        config["file"]["xml"] = xml.replace(".xml", "_PSF2.xml")
        WriteXml(lib, doc, srclist, config)

        lib, doc = CreateLib()
        config["event"]["evtype"] = 32
        config["file"]["xml"] = xml.replace(".xml", "_PSF3.xml")
        WriteXml(lib, doc, srclist, config)

    elif config['ComponentAnalysis']['EDISP'] == "yes":
        config["event"]["evtype"] = 64
        config["file"]["xml"] = xml.replace(".xml", "_EDISP0.xml")
        WriteXml(lib, doc, srclist, config)

        lib, doc = CreateLib()
        config["event"]["evtype"] = 128
        config["file"]["xml"] = xml.replace(".xml", "_EDISP1.xml")
        WriteXml(lib, doc, srclist, config)

        lib, doc = CreateLib()
        config["event"]["evtype"] = 256
        config["file"]["xml"] = xml.replace(".xml", "_EDISP2.xml")
        WriteXml(lib, doc, srclist, config)

        lib, doc = CreateLib()
        config["event"]["evtype"] = 512
        config["file"]["xml"] = xml.replace(".xml", "_EDISP3.xml")
        WriteXml(lib, doc, srclist, config)

    else:
        WriteXml(lib, doc, srclist, config)

    # Recover the old xml file name.
    config["file"]["xml"] = xml
    Xml_to_Reg(folder + "/Roi_model", srclist, Prog=sys.argv[0])
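# The branches above repeat the same CreateLib/WriteXml pattern once per Pass 8
# event-type component. A more compact, equivalent formulation is sketched below;
# the helper name and the driving dictionary are illustrative only, not part of enrico,
# and the suffix/evtype pairs are taken directly from the function above.
COMPONENT_EVTYPES = {
    'FrontBack': [("_FRONT", 1), ("_BACK", 2)],
    'PSF': [("_PSF0", 4), ("_PSF1", 8), ("_PSF2", 16), ("_PSF3", 32)],
    'EDISP': [("_EDISP0", 64), ("_EDISP1", 128), ("_EDISP2", 256), ("_EDISP3", 512)],
}

def write_component_xmls_sketch(config, srclist, key):
    """Write one XML file per event-type component for the selected splitting mode."""
    xml = config["file"]["xml"]
    for suffix, evtype in COMPONENT_EVTYPES[key]:
        lib, doc = CreateLib()
        config["event"]["evtype"] = evtype
        config["file"]["xml"] = xml.replace(".xml", suffix + ".xml")
        WriteXml(lib, doc, srclist, config)
    config["file"]["xml"] = xml  # restore the original xml file name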
def WriteXml(lib, doc, srclist, config):
    """Fill and write the library of sources into an XML file"""
    from enrico import Loggin
    mes = Loggin.Message()

    emin = config['energy']['emin']
    emax = config['energy']['emax']

    Galname = "GalDiffModel"
    Isoname = "IsoDiffModel"

    # test if the user provides diffuse files. if not, use the default ones
    if config['model']['diffuse_gal_dir'] == "":
        Gal_dir = env.DIFFUSE_DIR
    else:
        Gal_dir = config['model']['diffuse_gal_dir']

    if config['model']['diffuse_iso_dir'] == "":
        Iso_dir = env.DIFFUSE_DIR
    else:
        Iso_dir = config['model']['diffuse_iso_dir']

    if config['model']['diffuse_gal'] == "":
        Gal = Gal_dir + "/" + env.DIFFUSE_GAL
    else:
        Gal = Gal_dir + "/" + config['model']['diffuse_gal']

    if config['model']['diffuse_iso'] == "":
        try:
            Iso = utils.GetIso(config["event"]["evclass"], config["event"]["evtype"])
            if not os.path.isfile(Iso):
                raise RuntimeError
        except:
            mes.warning("Cannot find Iso file %s, please have a look. Switching to default one" % Iso)
            Iso = Iso_dir + "/" + env.DIFFUSE_ISO_SOURCE
    else:
        Iso = Iso_dir + "/" + config['model']['diffuse_iso']

    # add diffuse sources
    addDiffusePL(lib, Iso, free=1, value=1.0, max=10.0, min=1.0, name=Isoname)
    addGalprop(lib, Gal, free=1, value=1.0, scale=1.0, max=10.0, min=.010, name=Galname)

    print "Iso model file ", Iso
    print "Galactic model file ", Gal

    yesnodict = {}
    for y in ['yes', True, 'true', 1, 1.0, '1', '1.0']:
        yesnodict[y] = 1
    for n in ['no', False, 'false', 0, 0.0, '0', '0.0']:
        yesnodict[n] = 0

    try:
        ebldict = {}
        ebldict['tau_norm'] = 1.0
        ebldict['free_redshift'] = 0  # NOTE: ToDo
        ebldict['free_tau_norm'] = yesnodict[config['target']['fit_tau']]
        ebldict['redshift'] = float(config['target']['redshift'])
        ebldict['model'] = int(config['target']['ebl_model'])
        if ebldict['redshift'] < 1.e-3:
            ebldict = None
    except (NameError, KeyError):  # missing EBL-related keys raise KeyError
        ebldict = None

    # loop over the list of sources and add them to the library
    for i in xrange(len(srclist)):
        name = srclist[i].get('name')
        if name == config['target']['name']:
            ebl = ebldict
        else:
            ebl = None
        ra = srclist[i].get('ra')
        dec = srclist[i].get('dec')
        free = srclist[i].get('IsFree')
        spectype = srclist[i].get('SpectrumType')
        extendedName = srclist[i].get('ExtendedName')
        # Check the spectrum model
        if spectype.strip() == "PowerLaw":
            if ebl == None:
                addPSPowerLaw1(lib, name, ra, dec, "None",
                               eflux=srclist[i].get('scale'),
                               flux_free=free, flux_value=srclist[i].get('flux'),
                               index_free=free, index_value=srclist[i].get('index'),
                               extendedName=extendedName)
            if ebl != None:
                addPSLogparabola(lib, name, ra, dec, ebl,
                                 norm_free=free, norm_value=srclist[i].get('flux'),
                                 alpha_free=free, alpha_value=abs(srclist[i].get('index')),
                                 beta_free=0, beta_min=0, beta_max=0, beta_value=0,
                                 extendedName=extendedName)
        elif spectype.strip() == "PowerLaw2":
            addPSPowerLaw2(lib, name, ra, dec, ebl,
                           emin=emin, emax=emax,
                           flux_free=free, flux_value=srclist[i].get('flux'),
                           index_free=free, index_value=srclist[i].get('index'),
                           extendedName=extendedName)
        elif spectype.strip() == "LogParabola":
            addPSLogparabola(lib, name, ra, dec, ebl,
                             enorm=srclist[i].get('scale'),
                             norm_free=free, norm_value=srclist[i].get('flux'),
                             alpha_free=free, alpha_value=abs(srclist[i].get('index')),
                             beta_free=free, beta_value=srclist[i].get('beta'),
                             extendedName=extendedName)
        elif spectype.strip() in ("PLExpCutoff", "PLSuperExpCutoff", "PLSuperExpCutoff2"):
            addPSPLSuperExpCutoff(lib, name, ra, dec, ebl,
                                  eflux=srclist[i].get('scale'),
                                  flux_free=free, flux_value=srclist[i].get('flux'),
                                  index1_free=free, index1_value=srclist[i].get('index'),
                                  cutoff_free=free, cutoff_value=srclist[i].get('cutoff'),
                                  extendedName=extendedName)
        else:
            print('Warning!!!, unknown model %s' % spectype.strip())

    folder = config['out']
    utils.mkdir_p(folder)

    output = config['file']['xml']
    mes.info("write the Xml file in " + output)
    open(output, 'w').write(doc.toprettyxml(' '))  # save it
def Contour(config):
    # ROOT.gROOT.SetBatch(ROOT.kTRUE)
    # cres = ROOT.TCanvas("Contour")
    config["Spectrum"]["FitsGeneration"] = "no"
    parname1 = config["Contours"]["parname1"]
    parname2 = config["Contours"]["parname2"]

    FitRunner, Fit = RunGTlike.GenAnalysisObjects(config)
    spectrum = Fit[FitRunner.obs.srcname].funcs['Spectrum']
    ParName = spectrum.paramNames

    mes = Loggin.Message()
    mes.info("Computing Contours for " + parname1 + " and " + parname2)

    ### Check part !!!!
    findpar2 = findpar1 = False
    for par in ParName:  # Loop over the parameters to check
        if par == parname1:
            findpar1 = True
            if not spectrum.getParam(par).isFree():
                mes.error(parname1 + " is not a free parameter")
        if par == parname2:
            findpar2 = True
            if not spectrum.getParam(par).isFree():
                mes.error(parname2 + " is not a free parameter")

    if not findpar1:
        mes.error(parname1 + " is not a valid parameter")
    if not findpar2:
        mes.error(parname2 + " is not a valid parameter")

    bestloglike = Fit.fit(0, covar=False, optimizer=config['fitting']['optimizer'])
    print spectrum
    print "Min LogLikelihood =", bestloglike

    ## get values
    ParValue1 = spectrum.getParam(parname1).value()
    ParError1 = spectrum.getParam(parname1).error()
    bmin1, bmax1 = spectrum.getParam(parname1).getBounds()
    bmin1 = max(bmin1, ParValue1 - 20 * ParError1)
    bmax1 = min(bmax1, ParValue1 + 20 * ParError1)

    ParValue2 = spectrum.getParam(parname2).value()
    ParError2 = spectrum.getParam(parname2).error()
    bmin2, bmax2 = spectrum.getParam(parname2).getBounds()
    bmin2 = max(bmin2, ParValue2 - 20 * ParError2)
    bmax2 = min(bmax2, ParValue2 + 20 * ParError2)

    N = 100
    param2 = numpy.zeros(N)
    loglike = ROOT.TH2F("loglike", "Contours (68%, 95%, 99%)",
                        N, bmin1, bmax1, N, bmin2, bmax2)

    spectrum.getParam(parname2).setFree(0)

    mes.info("Boundaries for " + parname1 + " [" + str(bmin1) + "," + str(bmax1) + "]")
    mes.info("Boundaries for " + parname2 + " [" + str(bmin2) + "," + str(bmax2) + "]")

    for i in xrange(N):
        param2[i] = bmin2 + (bmax2 - bmin2) * i / (N - 1.)
        spectrum.getParam(parname2).setValue(param2[i])
        param1, ll = MakeScan(Fit, spectrum, parname1, bmin1, bmax1,
                              config['fitting']['optimizer'], N)
        for j in xrange(N):
            loglike.Fill(param1[j], param2[i], ll[j])

    utils.mkdir_p(config["out"] + "/" + cst.ScanPath)
    cres = ROOT.TCanvas("Contours")
    loglike.SetMinimum(bestloglike)
    loglike.SetMaximum(bestloglike + 3)
    loglike.SetXTitle(parname1)
    loglike.SetYTitle(parname2)
    loglike.SetStats(000)
    loglike.SetContour(3)
    loglike.SetContourLevel(0, bestloglike + 0.5)
    loglike.SetContourLevel(1, bestloglike + 4. / 2.)
    loglike.SetContourLevel(2, bestloglike + 6.63 / 2.)
    loglike.Draw("CONT1")

    tgrres = ROOT.TGraphErrors(2,
                               array.array('f', [ParValue1, ParValue1]),
                               array.array('f', [ParValue2, ParValue2]),
                               array.array('f', [ParError1, ParError1]),
                               array.array('f', [ParError2, ParError2]))
    tgrres.Draw(".pz")

    cres.Print(config["out"] + "/" + cst.ScanPath + "/Contours_" + parname1 + "_" + parname2 + ".eps")
    cres.Print(config["out"] + "/" + cst.ScanPath + "/Contours_" + parname1 + "_" + parname2 + ".C")
    cres.Print(config["out"] + "/" + cst.ScanPath + "/Contours_" + parname1 + "_" + parname2 + ".png")

    mes.success("Scan Performed")
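# The contour levels above (best -logL + 0.5, + 4/2, + 6.63/2) appear to be one-parameter
# delta(-log L) thresholds for roughly 68%, 95% and 99% confidence, matching the histogram
# title. A small sketch of how such thresholds can be derived from the chi-square
# distribution is given below; it assumes scipy is available (not otherwise used here)
# and is only meant to document where the numbers come from.
from scipy.stats import chi2

def delta_loglike_threshold(confidence_level, n_par=1):
    """Return the delta(-log L) corresponding to a given confidence level."""
    return chi2.ppf(confidence_level, n_par) / 2.0

# delta_loglike_threshold(0.683) ~ 0.5, delta_loglike_threshold(0.954) ~ 2.0,
# delta_loglike_threshold(0.99) ~ 3.32 for one parameter of interest.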
def PrepareEbin(Fit, FitRunner, sedresult=None):
    """ Prepare the computation of spectral points in energy bins by
    i) removing the weak sources (TS<1) # not true
    ii) updating the config file (options and energy) and saving it in a new ascii file
    iii) changing the spectral model and saving it in a new xml file.
    A list of the ascii files is returned"""
    mes = Loggin.Message()
    NEbin = int(FitRunner.config['Ebin']['NumEnergyBins'])

    config = FitRunner.config
    config['verbose'] = 'no'  # Be quiet

    # Replace the evt file with the fits file produced before
    # in order to speed up the production of the fits files
    config['file']['event'] = FitRunner.obs.eventcoarse
    # update the config to allow the fit in energy bins
    config['UpperLimit']['envelope'] = 'no'
    config['Ebin']['NumEnergyBins'] = '0'  # no new bins in energy!
    config['target']['redshift'] = '0'  # Disable EBL correction
    config['out'] = FitRunner.config['out'] + '/' + EbinPath + str(NEbin)
    config['Spectrum']['ResultPlots'] = 'no'  # no SED plot/model map
    # copy the choice of the user for the energy bin computation
    config['Spectrum']['FitsGeneration'] = config['Ebin']['FitsGeneration']
    config['UpperLimit']['TSlimit'] = config['Ebin']['TSEnergyBins']
    tag = FitRunner.config['file']['tag']

    Emax = float(FitRunner.config['energy']['emax'])
    Emin = float(FitRunner.config['energy']['emin'])
    lEmax = np.log10(Emax)
    lEmin = np.log10(Emin)
    utils._log("Preparing submission of fit into energy bins")
    print("Emin = {0} MeV".format(Emin),
          "Emax = {0} MeV".format(Emax),
          "Nbins = {0}".format(NEbin))

    ener = utils.string_to_list(config['Ebin']['DistEbins'])
    if ener is None:
        if (config['ComponentAnalysis']['FGL4'] == 'yes' or
                config['Ebin']['DistEbins'] == 'FGL4'):
            ener = np.asarray([50, 1e2, 3e2, 1e3, 3e3, 1e4, 3e4, 3e5])
            NEbin = len(ener) - 1
        elif config['Ebin']['DistEbins'] in ['TS', 'mix'] and sedresult != None:
            # Make the bins equispaced in sum(SED/SEDerr) - using the butterfly
            ipo = 0
            iTS = sedresult.SED / sedresult.Err
            TScumula = 0
            TSperbin = 1. * sum(iTS) / NEbin
            ener = [10 ** lEmin]
            while ipo < len(sedresult.E) - 1:
                TScumula += iTS[ipo]
                if TScumula / TSperbin > 1:
                    ener.append(sedresult.E[ipo])
                    TScumula -= TSperbin
                ipo += 1
            ener.append(10 ** lEmax)
            ener = np.array(ener)
            # intermediate approach (between TS-spaced and logE-spaced)
            if config['Ebin']['DistEbins'] == 'mix':
                ener = 0.5 * (ener + np.logspace(lEmin, lEmax, NEbin + 1))
        else:
            # Make the bins equispaced in logE (standard)
            ener = np.logspace(lEmin, lEmax, NEbin + 1)

    # 1. Remove bins that are out of the range covered by the data.
    # 2. Limit the bin extent to the range covered by the data.
    # Get elements strictly above the threshold, +1 element to the left for the left side.
    # Get elements strictly below the limit, +1 element to the right for the right side.
    # Example: [1,2,3,4,5] -> if Emin=3.4, Emax=3.9 we want to keep [3.4,3.9].
    ener = np.asarray(ener)
    print("Energy bins (before energy cuts): {0}".format(str(ener)))
    if len(ener) == 0:
        print("** Warning: energy bin array is empty")
        return (None)

    available_left = ener > Emin  # In the example FFFTT -> [4,5]
    for k, use in enumerate(available_left[:-1]):
        if not use and available_left[k + 1]:
            available_left[k] = True  # In the example FFTTT -> [3,5]

    available_right = ener < Emax  # In the example TTTFF -> [1,3]
    # work from a snapshot so that flags set inside the loop do not cascade to later edges
    right_orig = available_right.copy()
    for k, use in enumerate(right_orig[1:]):
        if not use and right_orig[k]:
            available_right[k + 1] = True  # In the example TTTTF -> [1,4]

    available = available_left * available_right
    ener = ener[available]  # In the example FFTTF -> [3,4]

    # Limit the range to the real energies that are covered by our data.
    # If the energy bins are well placed this should not do anything.
    ener[0] = np.max([Emin, ener[0]])
    ener[-1] = np.min([Emax, ener[-1]])
    NEbin = len(ener) - 1
    print("Energy bins (after energy cuts): {0}".format(str(ener)))
    if len(ener) == 0:
        print("** Warning: energy bin array is empty")
        return (None)

    utils.mkdir_p(config['out'])
    paramsfile = []

    srcname = FitRunner.config['target']['name']
    try:
        TSsrc = Fit.Ts(srcname)
    except RuntimeError:
        TSsrc = 0

    if config['UpperLimit']['TSlimit'] > TSsrc:
        utils._log('Re-optimize', False)
        print "An upper limit has been computed. The fit needs to be re-optimized"
        Fit.optimize(0)

    Pref = utils.ApproxPref(Fit, ener, srcname)
    Gamma = utils.ApproxGamma(Fit, ener, srcname)

    Model_type = Fit.model.srcs[srcname].spectrum().genericName()
    # if the model is not a PowerLaw, change the model
    if not (Model_type == 'PowerLaw'):
        for comp in Fit.components:
            comp.logLike.getSource(srcname).setSpectrum("PowerLaw")  # Change model
        config['target']['spectrum'] = "PowerLaw"

    xmltag_list = [""]  # handle summed-likelihood analysis
    if config['ComponentAnalysis']['FrontBack'] == "yes":
        xmltag_list = ["_FRONT", "_BACK"]
        mes.info("Splitting Front/Back events")
    elif config['ComponentAnalysis']['PSF'] == "yes":
        xmltag_list = ["_PSF0", "_PSF1", "_PSF2", "_PSF3"]
        mes.info("Splitting PSF events")
    elif config['ComponentAnalysis']['EDISP'] == "yes":
        xmltag_list = ["_EDISP0", "_EDISP1", "_EDISP2", "_EDISP3"]
        mes.info("Splitting EDISP events")
    elif config['ComponentAnalysis']['FGL4'] == "yes":
        from catalogComponents import evtnum, energybins, pixelsizes
        xmltag_list = []
        for ebin_i in energybins:
            for k, evt in enumerate(evtnum):
                # if pixelsizes[ebin_i][k] > 0:
                try:
                    xmltag_list.append("_{0}_En{1}".format(utils.typeirfs[k], ebin_i))
                except KeyError:
                    continue

    for ibin in xrange(NEbin):  # Loop over the energy bins
        E = utils.GetE0(ener[ibin + 1], ener[ibin])
        mes.info("Submitting # " + str(ibin) + " at energy " + str(E))
        # Update the model for the bin
        for comp, xmltag in zip(Fit.components, xmltag_list):
            NewFitObject = ChangeModel(comp, ener[ibin], ener[ibin + 1],
                                       srcname, Pref[ibin], Gamma[ibin])
            Xmlname = (config['out'] + "/" + srcname + "_" + str(ibin) + xmltag + ".xml")
            NewFitObject.writeXml(Xmlname)  # dump the corresponding xml file
            config['file']['xml'] = Xmlname.replace(xmltag, "")
        # update the energy bounds
        config['energy']['emin'] = str(ener[ibin])
        config['energy']['emax'] = str(ener[ibin + 1])
        config['energy']['decorrelation_energy'] = "no"
        # Change the spectral index to follow the estimated Gamma;
        # if the approximated Gamma is outside the bounds, set it to the limit
        Gamma_min = -5
        Gamma_max = -0.501
        Gamma_bin = -max(min(Gamma_max, Gamma[ibin]), Gamma_min)
        config['Spectrum']['FrozenSpectralIndex'] = Gamma_bin
        config['UpperLimit']['SpectralIndex'] = Gamma_bin

        config['file']['tag'] = tag + '_Ebin' + str(NEbin) + '_' + str(ibin)
        filename = config['target']['name'] + "_" + str(ibin) + ".conf"
        paramsfile.append(filename)
        config.write(open(config['out'] + '/' + filename, 'w'))  # save the config file in an ascii file

    return paramsfile
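# A small, self-contained illustration of the bin-edge trimming performed inline above,
# using the worked example from the comments (edges [1,2,3,4,5] with Emin=3.4, Emax=3.9).
# The helper name is illustrative only; PrepareEbin does the same selection inline.
import numpy as np

def trim_bin_edges_sketch(edges, emin, emax):
    """Keep the edges inside [emin, emax] plus one edge on each side, then clamp."""
    edges = np.asarray(edges, dtype=float)
    above = edges > emin
    below = edges < emax
    # extend each selection by exactly one edge (shift-and-OR, no cascading)
    keep_left = above | np.append(above[1:], False)
    keep_right = below | np.append(False, below[:-1])
    kept = edges[keep_left & keep_right]
    kept[0] = max(emin, kept[0])
    kept[-1] = min(emax, kept[-1])
    return kept

# trim_bin_edges_sketch([1, 2, 3, 4, 5], 3.4, 3.9) -> array([3.4, 3.9])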
def PrepareEbin(Fit, FitRunner, sedresult=None):
    """ Prepare the computation of spectral points in energy bins by
    i) removing the weak sources (TS<1) # not true
    ii) updating the config file (options and energy) and saving it in a new ascii file
    iii) changing the spectral model and saving it in a new xml file.
    A list of the ascii files is returned"""
    mes = Loggin.Message()
    NEbin = int(FitRunner.config['Ebin']['NumEnergyBins'])

    config = FitRunner.config
    config['verbose'] = 'no'  # Be quiet

    # Replace the evt file with the fits file produced before
    # in order to speed up the production of the fits files
    config['file']['event'] = FitRunner.obs.eventcoarse
    # update the config to allow the fit in energy bins
    config['UpperLimit']['envelope'] = 'no'
    config['Ebin']['NumEnergyBins'] = '0'  # no new bins in energy!
    config['target']['redshift'] = '0'  # Disable EBL correction
    config['out'] = FitRunner.config['out'] + '/' + EbinPath + str(NEbin)
    config['Spectrum']['ResultPlots'] = 'no'  # no SED plot/model map
    # copy the choice of the user for the energy bin computation
    config['Spectrum']['FitsGeneration'] = config['Ebin']['FitsGeneration']
    config['UpperLimit']['TSlimit'] = config['Ebin']['TSEnergyBins']
    tag = FitRunner.config['file']['tag']
    lEmax = np.log10(float(FitRunner.config['energy']['emax']))
    lEmin = np.log10(float(FitRunner.config['energy']['emin']))
    utils._log("Preparing submission of fit into energy bins")
    print(" Emin = ", float(FitRunner.config['energy']['emin']),
          " Emax = ", float(FitRunner.config['energy']['emax']),
          " Nbins = ", NEbin)

    if config['Ebin']['DistEbins'] in ['TS', 'mix'] and sedresult != None:
        # Make the bins equispaced in sum(SED/SEDerr) - using the butterfly
        ipo = 0
        iTS = sedresult.SED / sedresult.Err
        TScumula = 0
        TSperbin = 1. * sum(iTS) / NEbin
        ener = [10 ** lEmin]
        while ipo < len(sedresult.E) - 1:
            TScumula += iTS[ipo]
            if TScumula / TSperbin > 1:
                ener.append(sedresult.E[ipo])
                TScumula -= TSperbin
            ipo += 1
        ener.append(10 ** lEmax)
        ener = np.array(ener)
        # intermediate approach (between TS-spaced and logE-spaced)
        if config['Ebin']['DistEbins'] == 'mix':
            ener = 0.5 * (ener + np.logspace(lEmin, lEmax, NEbin + 1))
    else:
        # Make the bins equispaced in logE (standard)
        ener = np.logspace(lEmin, lEmax, NEbin + 1)

    utils.mkdir_p(config['out'])
    paramsfile = []

    srcname = FitRunner.config['target']['name']
    if config['UpperLimit']['TSlimit'] > Fit.Ts(srcname):
        utils._log('Re-optimize', False)
        print "An upper limit has been computed. The fit needs to be re-optimized"
        Fit.optimize(0)

    Pref = utils.ApproxPref(Fit, ener, srcname)
    Gamma = utils.ApproxGamma(Fit, ener, srcname)

    Model_type = Fit.model.srcs[srcname].spectrum().genericName()
    # if the model is not a PowerLaw, change the model
    if not (Model_type == 'PowerLaw'):
        for comp in Fit.components:
            comp.logLike.getSource(srcname).setSpectrum("PowerLaw")  # Change model
        config['target']['spectrum'] = "PowerLaw"

    xmltag_list = [""]  # handle summed-likelihood analysis
    if config['ComponentAnalysis']['FrontBack'] == "yes":
        xmltag_list = ["_FRONT", "_BACK"]
        mes.info("Splitting Front/Back events")
    elif config['ComponentAnalysis']['PSF'] == "yes":
        xmltag_list = ["_PSF0", "_PSF1", "_PSF2", "_PSF3"]
        mes.info("Splitting PSF events")
    elif config['ComponentAnalysis']['EDISP'] == "yes":
        xmltag_list = ["_EDISP0", "_EDISP1", "_EDISP2", "_EDISP3"]
        mes.info("Splitting EDISP events")

    for ibin in xrange(NEbin):  # Loop over the energy bins
        E = utils.GetE0(ener[ibin + 1], ener[ibin])
        mes.info("Submitting # " + str(ibin) + " at energy " + str(E))
        # Update the model for the bin
        for comp, xmltag in zip(Fit.components, xmltag_list):
            NewFitObject = ChangeModel(comp, ener[ibin], ener[ibin + 1],
                                       srcname, Pref[ibin], Gamma[ibin])
            Xmlname = (config['out'] + "/" + srcname + "_" + str(ibin) + xmltag + ".xml")
            NewFitObject.writeXml(Xmlname)  # dump the corresponding xml file
            config['file']['xml'] = Xmlname.replace(xmltag, "")
        # update the energy bounds
        config['energy']['emin'] = str(ener[ibin])
        config['energy']['emax'] = str(ener[ibin + 1])
        config['energy']['decorrelation_energy'] = "no"
        # Change the spectral index to follow the estimated Gamma;
        # if the approximated Gamma is outside the bounds, set it to the limit
        Gamma_min = -5
        Gamma_max = 0.5
        config['UpperLimit']['SpectralIndex'] = -min(max(Gamma_min, Gamma[ibin]), Gamma_max)

        config['file']['tag'] = tag + '_Ebin' + str(NEbin) + '_' + str(ibin)
        filename = config['target']['name'] + "_" + str(ibin) + ".conf"
        paramsfile.append(filename)
        config.write(open(config['out'] + '/' + paramsfile[ibin], 'w'))  # save the config file in an ascii file

    return paramsfile
def run(infile):
    """Run an entire Fermi analysis (spectrum) by reading a config file"""
    from enrico import utils
    from enrico import energybin
    from enrico.config import get_config
    from enrico import Loggin
    mes = Loggin.Message()

    config = get_config(infile)
    folder = config['out']
    utils.mkdir_p(folder)

    FitRunner, Fit = GenAnalysisObjects(config)
    # create all the fit files and run gtlike
    FitRunner.PerformFit(Fit)
    sedresult = None

    # plot the SED and model map if possible and asked
    if float(config['UpperLimit']['TSlimit']) < Fit.Ts(config['target']['name']):
        if config['Spectrum']['ResultPlots'] == 'yes':
            from enrico.constants import SpectrumPath
            utils.mkdir_p("%s/%s/" % (config['out'], SpectrumPath))
            sedresult = FitRunner.ComputeSED(Fit, dump=True)
        else:
            sedresult = FitRunner.ComputeSED(Fit, dump=False)

        if config['energy']['decorrelation_energy'] == 'yes':
            # Update the energy scale to the decorrelation energy
            mes.info('Setting the decorrelation energy as new Scale for the spectral parameters')
            spectrum = Fit[FitRunner.obs.srcname].funcs['Spectrum']
            modeltype = spectrum.genericName()
            genericName = Fit.model.srcs[FitRunner.obs.srcname].spectrum().genericName()

            varscale = None
            if genericName == "PowerLaw2":
                varscale = None
            elif genericName in ["PowerLaw", "PLSuperExpCutoff", "EblAtten::PLSuperExpCutoff"]:
                varscale = "Scale"
            elif genericName in ["LogParabola", "EblAtten::LogParabola",
                                 "BrokenPowerLaw", "EblAtten::BrokenPowerLaw"]:
                varscale = "Eb"

            if varscale is not None:
                spectrum.getParam(varscale).setValue(sedresult.decE)
                FitRunner.PerformFit(Fit)

    # Get and dump the target-specific results
    Result = FitRunner.GetAndPrintResults(Fit)
    utils.DumpResult(Result, config)

    # Make energy bins by running a *new* analysis
    Nbin = config['Ebin']['NumEnergyBins']

    FitRunner.config['file']['parent_config'] = infile

    if config['Spectrum']['ResultParentPlots'] == "yes":
        plot_sed_fromconfig(get_config(config['file']['parent_config']),
                            ignore_missing_bins=True)

    if config['Spectrum']['ResultPlots'] == 'yes':
        outXml = utils._dump_xml(config)
        # the possibility of making the model map is checked inside the function
        FitRunner.ModelMap(outXml)
        if Nbin > 0:
            FitRunner.config['Spectrum']['ResultParentPlots'] = "yes"
        plot_sed_fromconfig(get_config(infile), ignore_missing_bins=True)

    energybin.RunEbin(folder, Nbin, Fit, FitRunner, sedresult)

    del sedresult
    del Result
    del FitRunner
def _ManageFolder(self, path):
    """ All files will be stored in a subfolder named path + NLCbin.
    Create that subfolder"""
    self.LCfolder = self.folder + "/LightCurve_" + str(self.Nbin) + "bins/"
    utils.mkdir_p(self.LCfolder)
    self.config['out'] = self.LCfolder
#!/usr/bin/env python
"""Run a Fermi analysis to generate FITS files by reading a config file"""
from RunGTlike import Analysis
import sys
from enrico import utils
from enrico import Loggin
from enrico.config import get_config

if __name__ == '__main__':
    mes = Loggin.Message()
    try:
        infile = sys.argv[1]
    except:
        print('Usage: ' + sys.argv[0] + ' <config file name>')
        mes.error('Config file not found.')

    config = get_config(infile)
    folder = config['out']
    utils.mkdir_p(folder)

    Analyse = Analysis(folder, config,
                       configgeneric=config,
                       tag="", verbose=1)