def CreateLikeObject(self): """Create an UnbinnedAnalysis or a BinnedAnalysis and retrun it. By default, the optimizer is DRMNGB """ #create binnedAnalysis object if self.config['analysis']['likelihood'] == 'binned': Obs = BinnedObs(srcMaps=self.obs.scrMap, expCube=self.obs.Cubename, binnedExpMap=self.obs.BinnedMapfile, irfs=self.obs.irfs) Fit = BinnedAnalysis(Obs, self.obs.xmlfile, optimizer='DRMNGB') #create a unbinnedAnalysis object if self.config['analysis']['likelihood'] == 'unbinned': Obs = UnbinnedObs(self.obs.eventfile, self.obs.ft2, expMap=self.obs.Mapname, expCube=self.obs.Cubename, irfs=self.obs.irfs) Fit = UnbinnedAnalysis(Obs, self.obs.xmlfile, optimizer='DRMNGB') if float(self.config['Spectrum']['FrozenSpectralIndex']) > 0: if Fit.model.srcs[self.obs.srcname].spectrum().genericName( ) == "PowerLaw" or Fit.model.srcs[ self.obs.srcname].spectrum().genericName() == "PowerLaw2": PhIndex = Fit.par_index(self.obs.srcname, 'Index') Fit[PhIndex] = -float( self.config['Spectrum']['FrozenSpectralIndex']) Fit.freeze(PhIndex) print "Freezing spectral index at ", -float( self.config['Spectrum']['FrozenSpectralIndex']) else: log.warning( "The model is not a PowerLaw. Cannot freeze the index.") return Fit #return the BinnedAnalysis or UnbinnedAnalysis object.
def setup_gtlike(self):
    self._obs = BinnedObs(srcMaps=self.srcmapfile,
                          expCube=self.config['ltcube'],
                          binnedExpMap=self.bexpfile,
                          irfs=self.config['irfs'])
    self._like = BinnedAnalysis(binnedData=self._obs,
                                srcModel=self.srcmdl,
                                optimizer=self.config['optimizer'])

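# setup_gtlike() above only builds the BinnedObs/BinnedAnalysis pair. A minimal
# sketch of the usual next step (set a tolerance, run the fit, write the fitted
# model) is given below; the file names and the 'NewMinuit' optimizer are
# illustrative assumptions, not taken from the snippet above.
from BinnedAnalysis import BinnedObs, BinnedAnalysis

obs = BinnedObs(srcMaps='srcmap.fits',          # hypothetical input files
                expCube='ltcube.fits',
                binnedExpMap='bexpmap.fits',
                irfs='CALDB')
like = BinnedAnalysis(binnedData=obs, srcModel='model.xml', optimizer='NewMinuit')
like.tol = 1e-4                                 # fit tolerance
loglike = like.fit(covar=True)                  # returns -log(likelihood)
print('-log(L) = %.2f' % loglike)
like.writeXml('model_fit.xml')                  # save the fitted parameters
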
def GetObsList(lobssum):
    obs = []
    lobs = lobssum.split('|')
    for l0 in lobs:
        l = [s.strip() for s in l0.split(',')]
        print l
        obs.append(BinnedObs(srcMaps=l[0],
                             expCube=l[1],
                             binnedExpMap=l[2],
                             irfs='CALDB'))
        print "IRFs:", obs[-1].irfs
    return obs

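# GetObsList() only returns the per-component BinnedObs objects. How the driver
# script combines them (via GetLikeObj, not shown here) is presumably a summed
# likelihood; the sketch below assumes the standard SummedLikelihood module and a
# shared model file 'model.xml', both of which are assumptions.
from BinnedAnalysis import BinnedAnalysis
from SummedLikelihood import SummedLikelihood

def SumComponents(obs_list, srcmdl='model.xml', optimizer='Minuit'):
    summed = SummedLikelihood(optimizer=optimizer)
    for obs in obs_list:
        summed.addComponent(BinnedAnalysis(obs, srcmdl, optimizer=optimizer))
    return summed

# summed = SumComponents(GetObsList('sm1.fits, lt1.fits, bexp1.fits | sm2.fits, lt2.fits, bexp2.fits'))
# summed.fit(covar=True)
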
def CreateLikeObject(self): """Create an UnbinnedAnalysis or a BinnedAnalysis and retrun it.""" #create binnedAnalysis object if self.config['analysis']['likelihood'] == 'binned': Obs = BinnedObs(srcMaps=self.obs.srcMap, expCube=self.obs.Cubename, binnedExpMap=self.obs.BinnedMapfile, irfs=self.obs.irfs) Fit = BinnedAnalysis(Obs, self.obs.xmlfile, optimizer=self.config['fitting']['optimizer']) #create a unbinnedAnalysis object if self.config['analysis']['likelihood'] == 'unbinned': Obs = UnbinnedObs(self.obs.mktimefile, self.obs.ft2, expMap=self.obs.Mapname, expCube=self.obs.Cubename, irfs=self.obs.irfs) Fit = UnbinnedAnalysis( Obs, self.obs.xmlfile, optimizer=self.config['fitting']['optimizer']) # Fix this, EBL absorbed models use LogParabola with b=0 instead of PowerLaw, we may want to allow fixed shape for that case if float(self.config['Spectrum']['FrozenSpectralIndex'] > 0 ) and self.config['target']['spectrum'] == "PowerLaw": parameters = dict() parameters['Index'] = -float( self.config['Spectrum']['FrozenSpectralIndex']) # parameters['alpha'] = +float(self.config['Spectrum']['FrozenSpectralIndex']) # parameters['Index1'] = -float(self.config['Spectrum']['FrozenSpectralIndex']) # parameters['beta'] = 0 # parameters['Index2'] = 2. # parameters['Cutoff'] = 30000. # set the cutoff to be high for key in parameters.keys(): try: IdGamma = utils.getParamIndx(Fit, self.obs.srcname, key) Fit[IdGamma] = parameters[key] # set the parameter Fit[IdGamma].setFree( 0) #the variable index is frozen to compute the UL except: continue else: self.info("Freezing %s at %s"\ %(key,str(self.config['Spectrum']['FrozenSpectralIndex']))) return Fit #return the BinnedAnalysis or UnbinnedAnalysis object.
def CreateLikeObject(self): """Create an UnbinnedAnalysis or a BinnedAnalysis and retrun it.""" #create binnedAnalysis object if self.config['analysis']['likelihood'] == 'binned': use_edisp = self.config['analysis']['EnergyDispersion'] == 'yes' edisp_bins = -2 if use_edisp==True else 0 Obs = BinnedObs(srcMaps=self.obs.srcMap, expCube=self.obs.Cubename, binnedExpMap=self.obs.BinnedMapfile, irfs=self.obs.irfs) Cfg = BinnedConfig(use_edisp=use_edisp, edisp_bins=edisp_bins) Fit = BinnedAnalysis(Obs, self.obs.xmlfile, config=Cfg, optimizer=self.config['fitting']['optimizer']) Fit.setEnergyRange(self.obs.Emin,self.obs.Emax) print("Is edisp enabled? {0}".format(str(Fit.logLike.use_edisp()))) #create a unbinnedAnalysis object if self.config['analysis']['likelihood'] == 'unbinned': Obs = UnbinnedObs(self.obs.mktimefile, self.obs.ft2, expMap=self.obs.Mapname, expCube=self.obs.Cubename, irfs=self.obs.irfs) Fit = UnbinnedAnalysis(Obs, self.obs.xmlfile, optimizer=self.config['fitting']['optimizer']) # Fix this, EBL absorbed models use LogParabola with b=0 instead of PowerLaw, # we may want to allow fixed shape for that case if float(self.config['Spectrum']['FrozenSpectralIndex']!=0): parameters = dict() parameters['Index'] = -float(self.config['Spectrum']['FrozenSpectralIndex']) parameters['alpha'] = +float(self.config['Spectrum']['FrozenSpectralIndex']) parameters['Index1'] = -float(self.config['Spectrum']['FrozenSpectralIndex']) parameters['beta'] = 0 parameters['Index2'] = 2. parameters['Cutoff'] = 30000. # set the cutoff to be high for key in parameters.keys(): IdGamma = utils.getParamIndx(Fit, self.obs.srcname, key) if (IdGamma == -1): continue else: self.info("Freezing %s = %s" %(str(key),str(parameters[key]))) Fit[IdGamma] = parameters[key] # set the parameter Fit[IdGamma].setFree(False)#the variable index is frozen to compute the UL return Fit #return the BinnedAnalysis or UnbinnedAnalysis object.
def CreateLikeObject(self): """Create an UnbinnedAnalysis or a BinnedAnalysis and retrun it.""" #create binnedAnalysis object if self.config['analysis']['likelihood'] == 'binned': Obs = BinnedObs(srcMaps=self.obs.srcMap, expCube=self.obs.Cubename, binnedExpMap=self.obs.BinnedMapfile, irfs=self.obs.irfs) Fit = BinnedAnalysis(Obs, self.obs.xmlfile, optimizer=self.config['fitting']['optimizer']) #create a unbinnedAnalysis object if self.config['analysis']['likelihood'] == 'unbinned': Obs = UnbinnedObs(self.obs.eventfile, self.obs.ft2, expMap=self.obs.Mapname, expCube=self.obs.Cubename, irfs=self.obs.irfs) Fit = UnbinnedAnalysis(Obs, self.obs.xmlfile, optimizer=self.config['fitting']['optimizer']) if float(self.config['Spectrum']['FrozenSpectralIndex']) != 0: parameters = dict() parameters['Index'] = -float(self.config['Spectrum']['FrozenSpectralIndex']) parameters['alpha'] = +float(self.config['Spectrum']['FrozenSpectralIndex']) parameters['Index1'] = -float(self.config['Spectrum']['FrozenSpectralIndex']) parameters['beta'] = 0 parameters['Index2'] = 2. parameters['Cutoff'] = 30000. # set the cutoff to be high for key in parameters.keys(): try: IdGamma = utils.getParamIndx(Fit, self.obs.srcname, key) Fit[IdGamma] = parameters[key] # set the parameter Fit[IdGamma].setFree(0)#the variable index is frozen to compute the UL except: continue else: self.info("Freezing %s at %s"\ %(key,str(self.config['Spectrum']['FrozenSpectralIndex']))) return Fit #return the BinnedAnalysis or UnbinnedAnalysis object.
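# In the CreateLikeObject variants above the spectral index is frozen because the
# returned object is later used for an upper-limit computation. A minimal sketch of
# that follow-up step, assuming the standard UpperLimits module; the function name,
# source-name argument and energy bounds are illustrative, not from the code above.
from UpperLimits import UpperLimits

def ComputeUpperLimit(Fit, srcname, emin=100., emax=300000.):
    Fit.fit(covar=True)                    # fit with the spectral shape frozen
    ul = UpperLimits(Fit)
    limit, parvalue = ul[srcname].compute(emin=emin, emax=emax)
    print("Flux upper limit (%g-%g MeV): %g ph/cm^2/s" % (emin, emax, limit))
    return limit
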
def main(srcMaps=None, expCube=None, binnedExpMap=None,
         outname=None, IRFs='CALDB', index=2):
    if srcMaps is None:
        srcMaps = env['ccube']
        expCube = env['lvtime']
        binnedExpMap = env['bexpcube']
        outname = env.get('mdlmap', 'modelcube.fits').replace('.fits', '_ps.fits')
        IRFs = env.get('irfs', 'CALDB')

    obs = BinnedObs(srcMaps=srcMaps, expCube=expCube,
                    binnedExpMap=binnedExpMap, irfs=IRFs)

    with pyfits.open(srcMaps) as f:
        w = WCS(f[0].header)
        cpix = int(f[0].header['NAXIS1']) / 2
        RA, DEC, e = w.wcs_pix2sky([[cpix, cpix, 0]], 0)[0]

    xmlfile = outname.replace('.fits', '') + '.xml'
    with open(xmlfile, 'w') as f:
        f.write(content_xml % (index, RA, DEC))

    like1 = BinnedAnalysis(obs, xmlfile, optimizer='')
    os.remove(xmlfile)

    print 'creating Model Map...'
    like1.writeModelMap(outname)
    print 'Done!'

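# Hypothetical direct invocation of main() with explicit files instead of the env
# dictionary; the file names below are placeholders, not part of the original script.
if __name__ == '__main__':
    main(srcMaps='ccube.fits',
         expCube='ltcube.fits',
         binnedExpMap='bexpcube.fits',
         outname='modelcube_ps.fits',
         IRFs='CALDB',
         index=2)
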
def __init__(self, roi, **kwargs):
    """ Build a gtlike pyLikelihood object
        which is consistent with a pointlike roi. """
    keyword_options.process(self, kwargs)
    self.roi = roi

    if not roi.quiet: print 'Running a gtlike followup'

    self.old_dir = os.getcwd()
    if self.savedir is not None:
        self.savedata = True
        if not os.path.exists(self.savedir):
            os.makedirs(self.savedir)
    else:
        self.savedata = False
        self.savedir = mkdtemp(prefix=self.savedir_prefix)

    # put pfiles into savedir
    os.environ['PFILES'] = self.savedir + ';' + os.environ['PFILES'].split(';')[-1]

    if not roi.quiet: print 'Saving files to ', self.savedir

    if self.emin is None and self.emax is None and self.enumbins is None:
        self.emin, self.emax = roi.bin_edges[0], roi.bin_edges[-1]
        self.enumbins = len(roi.bin_edges) - 1
    elif (self.emin is not None and
          self.emax is not None and
          self.enumbins is not None):
        # all set
        pass
    else:
        raise Exception("emin, emax, and enumbins must all be set.")

    # Note that this formulation makes the gtlike ROI slightly smaller than
    # the pointlike ROI (so the gtlike ROI is inside the pointlike ROI)
    roi_radius = np.degrees(max(_.radius_in_rad for _ in roi.bands))
    if self.bigger_roi:
        npix = int(math.ceil(2.0 * roi_radius / self.binsz))
    else:
        npix = int(math.ceil(np.sqrt(2.0) * roi_radius / self.binsz))

    ct = roi.sa.pixeldata.conv_type

    cmap_file = join(self.savedir, 'ccube.fits')
    srcmap_file = join(self.savedir, 'srcmap.fits')
    bexpmap_file = join(self.savedir, 'bexpmap.fits')
    input_srcmdl_file = join(self.savedir, 'srcmdl.xml')
    cut_ft1 = join(self.savedir, "ft1_cut.fits")

    ft2 = Gtlike.get_ft2(roi)
    ltcube = roi.sa.pixeldata.ltcube

    if self.fix_pointlike_ltcube:
        print 'Fixing pointlike ltcube %s' % ltcube
        livetime.fix_pointlike_ltcube(ltcube)

    irfs = Gtlike.get_gtlike_irfs(roi)

    if self.galactic:
        x, y, coordsys_str = roi.roi_dir.l(), roi.roi_dir.b(), 'GAL'
    else:
        x, y, coordsys_str = roi.roi_dir.ra(), roi.roi_dir.dec(), 'CEL'

    Gtlike.save_xml(roi, input_srcmdl_file,
                    extended_dir_name=self.extended_dir_name)

    evfile = Gtlike.make_evfile(roi, self.savedir)

    if not os.path.exists(cut_ft1):
        if not roi.quiet: print 'Running gtselect'
        gtselect = GtApp('gtselect', 'dataSubselector')
        gtselect.run(infile=evfile, outfile=cut_ft1,
                     ra=0, dec=0, rad=180,
                     tmin=0, tmax=0,
                     emin=self.emin, emax=self.emax,
                     zmax=180, convtype=ct,
                     chatter=self.chatter)
    else:
        if not roi.quiet: print '... Skipping gtselect'

    if not os.path.exists(cmap_file):
        if not roi.quiet: print 'Running gtbin (ccube)'
        gtbin = GtApp('gtbin', 'evtbin')
        gtbin.run(algorithm='ccube',
                  nxpix=npix, nypix=npix, binsz=self.binsz,
                  evfile=cut_ft1, outfile=cmap_file, scfile=ft2,
                  xref=x, yref=y, axisrot=0, proj=self.proj,
                  ebinalg='LOG', emin=self.emin, emax=self.emax,
                  enumbins=self.enumbins,
                  coordsys=coordsys_str,
                  chatter=self.chatter)
    else:
        if not roi.quiet: print '... Skipping gtbin (ccube)'

    if not os.path.exists(bexpmap_file):
        # Use the default binning: all sky, 1 deg/pixel
        if not roi.quiet: print 'Running gtexpcube'
        gtexpcube = GtApp('gtexpcube2', 'Likelihood')
        gtexpcube.run(infile=ltcube, cmap='none',
                      ebinalg='LOG', emin=self.emin, emax=self.emax,
                      enumbins=self.enumbins,
                      outfile=bexpmap_file, proj='CAR',
                      nxpix=360, nypix=180, binsz=1,
                      irfs=irfs, coordsys=coordsys_str,
                      chatter=self.chatter)
    else:
        if not roi.quiet: print '... Skipping gtexpcube'

    if not os.path.exists(srcmap_file):
        if not roi.quiet: print 'Running gtsrcmaps'
        gtsrcmaps = GtApp('gtsrcmaps', 'Likelihood')
        gtsrcmaps.run(scfile=ft2,
                      expcube=ltcube,
                      cmap=cmap_file,
                      srcmdl=input_srcmdl_file,
                      bexpmap=bexpmap_file,
                      outfile=srcmap_file,
                      irfs=irfs,
                      rfactor=self.rfactor,
                      resample=self.resample,
                      minbinsz=self.minbinsz,
                      chatter=self.chatter)
    else:
        if not roi.quiet: print '... Skipping gtsrcmaps'

    if not roi.quiet: print 'Creating Binned LIKE'
    obs = BinnedObs(srcMaps=srcmap_file, expCube=ltcube,
                    binnedExpMap=bexpmap_file, irfs=irfs)

    self.like = BinnedAnalysis(binnedData=obs,
                               srcModel=input_srcmdl_file,
                               optimizer=self.optimizer)

    if self.enable_edisp:
        if not roi.quiet: print 'Enabling energy dispersion'
        self.like.logLike.set_edisp_flag(True)

    if not roi.quiet: print 'Binned LIKE Created!'

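# Hedged usage sketch for the Gtlike class built above: once the constructor has
# produced self.like, the calling code would typically run a fit and query TS and
# flux. The 'SourceName' and energy bounds are illustrative assumptions.
gtlike = Gtlike(roi)                  # pyLikelihood counterpart of the pointlike ROI
like = gtlike.like                    # the BinnedAnalysis constructed above
like.fit(covar=True)
print('TS   = %.1f' % like.Ts('SourceName'))
print('Flux = %g ph/cm^2/s' % like.flux('SourceName', emin=100, emax=100000))
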
else:
    dellist = [x.strip() for x in dellist.split(',')]

start = time.time()

if statistic != 'BINNED':
    print '%s method is not implemented in this script' % statistic
    exit(1)

lobssum = env.get('SUM_LIST')  # srcmap1, ltcube1, expcube1 | srcmap2, ltcube2, expcube2 | ...
if lobssum is not None:
    obs = GetObsList(lobssum)
else:
    obs = BinnedObs(srcMaps=srcMaps, expCube=expCube,
                    binnedExpMap=binnedExpMap, irfs=IRFs)

like1 = GetLikeObj(obs, modelin, optimizer)

if refit:
    optim_refit = env.get('optimizer_prerefit', optimizer)
    tol_refit = env.get('Tolerance_prerefit', tol)
    like1.tol = tol_refit
    print 'tolerance =', like1.tol
    Likelihood(like1, modelout + '_refit', optim_refit, statistic, None,
               results, plot, slist, optmdl, True, bayes, None, None, dellist)
    print '\nRefit\n'