Example #1
0
    def __init__(self, config=None, **kwargs):
        """Initialize the analysis manager.

        Parameters
        ----------
        config : dict or None
            Configuration mapping merged into the defaults by configure().
        **kwargs
            Additional configuration overrides forwarded to configure().
        """
        super(AnalysisManager, self).__init__()
        # Fold SelectorTask's default options into this object's
        # configuration under the 'select' group.
        self.update_default_config(SelectorTask, group='select')

        self.configure(config, **kwargs)

        import pprint

        # Debug aid: dump the fully resolved configuration to stdout.
        pprint.pprint(self.config)

        # Composite likelihood; components are added elsewhere
        # (presumably during ROI setup — not visible from this method).
        self._like = SummedLikelihood()
Example #2
0
def GenAnalysisObjects(config, verbose=1, xmlfile=""):
    """Create the analysis objects for a spectrum fit.

    Parameters
    ----------
    config : dict-like
        Configuration; reads config['Spectrum']['SummedLike'] and
        config['out'].
    verbose : int
        Verbosity level forwarded to Analysis.
    xmlfile : str
        Optional XML model file overriding the default one ("" = keep default).

    Returns
    -------
    (FitRunner, Fit)
        The Analysis instance and the likelihood object built from it.
    """
    # Check if the summed likelihood method should be used and get the
    # Analysis objects (observation and (Un)BinnedAnalysis objects).
    SummedLike = config['Spectrum']['SummedLike']
    folder = config['out']
    if SummedLike == 'yes':
        # Summed-likelihood mode is unfinished: bail out before doing any work.
        sys.exit("not yet working")
        # --- unreachable until the feature above is completed ---
        FitRunnerfront = Analysis(folder, config, tag="FRONT", verbose=verbose)
        FitRunnerback = Analysis(folder, config, tag="BACK", verbose=verbose)
        if xmlfile:
            FitRunnerfront.obs.xmlfile = xmlfile
            FitRunnerback.obs.xmlfile = xmlfile
        FitB = FitRunnerback.CreateLikeObject()
        FitF = FitRunnerfront.CreateLikeObject()
        import SummedLikelihood
        Fit = SummedLikelihood.SummedLikelihood()
        Fit.addComponent(FitB)
        Fit.addComponent(FitF)
        FitRunner = FitRunnerback
    else:
        # Create one obs instance and build its likelihood object.
        FitRunner = Analysis(folder, config, tag="", verbose=verbose)
        if xmlfile:
            FitRunner.obs.xmlfile = xmlfile
        # BUGFIX: this assignment used to run unconditionally after the
        # if/else, which would have clobbered the summed Fit built in the
        # branch above; it now applies only to the single-component case.
        Fit = FitRunner.CreateLikeObject()
    return FitRunner, Fit
Example #3
0
    def __init__(self, config=None, **kwargs):
        """Set up the manager: merge defaults, apply *config* and keyword
        overrides, and create the (initially empty) summed likelihood."""
        super(AnalysisManager, self).__init__()
        # Fold SelectorTask's defaults into our config under 'select'.
        self.update_default_config(SelectorTask, group='select')
        self.configure(config, **kwargs)

        # Debug aid: show the fully resolved configuration.
        from pprint import pprint
        pprint(self.config)

        self._like = SummedLikelihood()
Example #4
0
    def _GenFit(self):
        """(Re)build self.Fit, the likelihood object used for fitting."""
        # Drop any previously built likelihood so we start from a clean
        # state.  BUGFIX: the original bare `except:` swallowed every
        # exception (including KeyboardInterrupt); only the expected
        # "no previous Fit" case is tolerated now.
        try:
            del self.Fit
        except AttributeError:
            pass

        if self.config['Spectrum']['SummedLike'] == 'yes':
            # Front/back split: one observation and one fitter per side.
            Obs1 = Observation(self.folder, self.config, tag="FRONT")
            Obs2 = Observation(self.folder, self.config, tag="BACK")
            self.FitRunnerfront = FitMaker(Obs1, self.config)
            self.FitRunnerback = FitMaker(Obs2, self.config)
            self.FitRunnerfront.CreateLikeObject()
            self.FitRunnerback.CreateLikeObject()
            # NOTE(review): the summed likelihood is created empty here and
            # the two components built above are never added — confirm this
            # is intentional.
            self.Fit = SummedLikelihood.SummedLikelihood()
        else:
            Obs = Observation(self.folder, self.config, tag="")
            self.FitRunner = FitMaker(Obs, self.config)  ##Class
            #self.Fit = self.FitRunner.CreateLikeObject()
            self.Fit = SummedLikelihood.SummedLikelihood()
            self.Fit.addComponent(self.FitRunner.CreateLikeObject())
Example #5
0
def GenAnalysisObjects(config, verbose=1, xmlfile=""):
    """Build the Analysis/likelihood objects, splitting the analysis into
    per-event-type components when a ComponentAnalysis is configured.

    Parameters
    ----------
    config : dict-like
        Configuration; reads ['out'], ['file']['xml'] and
        ['ComponentAnalysis'].
    verbose : int
        Verbosity forwarded to Analysis.
    xmlfile : str
        Optional XML model file overriding config['file']['xml'].

    NOTE(review): this block appears truncated — only the Front/Back
    component branch is visible and no return statement is reached here.
    Written for Python 2 (`except RuntimeError, e` syntax below).
    """
    mes = Loggin.Message()
    #check is the summed likelihood method should be used and get the
    #Analysis objects (observation and (Un)BinnedAnalysis objects)
    folder = config['out']

    # If there are no xml files, create it and print a warning
    if len(glob.glob(config['file']['xml'].replace('.xml', '*.xml'))) == 0:
        mes.warning("Xml not found, creating one for the given config %s" %
                    config['file']['xml'])
        XmlMaker(config)

    # Top-level container: each component's likelihood is added to it.
    Fit = SummedLikelihood.SummedLikelihood()
    if hasKey(config, 'ComponentAnalysis') == True:
        # Create one obs instance for each component
        configs = [None] * 4
        Fits = [None] * 4
        Analyses = [None] * 4
        if isKey(config['ComponentAnalysis'], 'FrontBack') == 'yes':
            from enrico.data import fermievtypes
            mes.info("Breaking the analysis in Front/Back events")
            # Set Summed Likelihood to True
            oldxml = config['file']['xml']
            for k, TYPE in enumerate(["FRONT", "BACK"]):
                # Clone the config and restrict it to this event type.
                configs[k] = ConfigObj(config)
                configs[k]['event']['evtype'] = fermievtypes[TYPE]
                try:
                    Analyses[k] = Analysis(folder, configs[k], \
                        configgeneric=config,\
                        tag=TYPE, verbose = verbose)
                    if not (xmlfile == ""): Analyses[k].obs.xmlfile = xmlfile
                    Fits[k] = Analyses[k].CreateLikeObject()
                    Fit.addComponent(Fits[k])
                except RuntimeError, e:
                    if 'RuntimeError: gtltcube execution failed' in str(e):
                        mes.warning("Event type %s is empty! Error is %s" %
                                    (TYPE, str(e)))
            FitRunner = Analyses[0]
Example #6
0
    def __init__(self, config):
         """Prepare the TestModel working directory and the likelihood object.

         Parameters
         ----------
         config : dict-like
             Configuration; reads ['out'], ['analysis']['convtype'] and
             ['Spectrum']['SummedLike'].
         """
         self.config = config
         self.folder = self.config['out']
         # Working directory for the test-model files.
         os.system("mkdir -p "+self.folder+"/TestModel")
         convtype = self.config['analysis']['convtype']

         if self.config['Spectrum']['SummedLike'] == 'yes':
             # Front/back split: one observation + fitter per conversion type.
             Obs1 = Observation(self.folder, self.config, convtype=0, tag="FRONT")
             Obs2 = Observation(self.folder, self.config, convtype=1, tag="BACK")
             FitRunnerfront = FitMaker(Obs1, self.config)
             FitRunnerback = FitMaker(Obs2, self.config)
             FitRunnerfront.CreateLikeObject()
             FitRunnerback.CreateLikeObject()
             # NOTE(review): SummedLikelihood is created empty — the two
             # likelihood objects built above are never added to it, and the
             # local FitMaker instances are discarded.  Confirm intended.
             self.Fit = SummedLikelihood.SummedLikelihood()
         else:
             Obs = Observation(self.folder, self.config, convtype, tag="")
             FitRunner = FitMaker(Obs, self.config)##Class
             self.Fit = FitRunner.CreateLikeObject()

         # Store the results in a dictionnary
         self.Results = {}
         self.Results["PowerLaw"] = 0
         self.Results["LogParabola"] = 0
         self.Results["PLSuperExpCutoff"] = 0
Example #7
0
class AnalysisManager(Configurable):
    """Manage a (possibly joint) binned likelihood analysis.

    setup_roi() prepares the data products (event selection, livetime cube,
    catalog-based source model) and builds one BinnedGtlike component per
    entry of config['joint'], all combined into a single SummedLikelihood
    exposed as ``like``.  NOTE(review): written for Python 2 (print
    statements below).
    """

    # Defaults merged by configure(); None values are expected to be
    # supplied by the user configuration.
    default_config = { 'convtype'   : -1,
                       'binsperdec' : 4,
                       'savedir'    : None,
                       'scratchdir' : None,
                       'target'     : None,
                       'evfile'     : None,
                       'scfile'     : None,
                       'ltcube'     : None,
                       'galdiff'    : None,
                       'isodiff'    : None,
                       'event_types': None,
                       'gtbin'      : None,
                       'catalog'    : '2FGL',
                       'optimizer'  : 'MINUIT',
                       'joint'      : None,
                       'irfs'       : None }
    
    def __init__(self,config=None,**kwargs):
        """Merge defaults, apply *config*/**kwargs, and create the (empty)
        SummedLikelihood that setup_roi() later populates."""
        super(AnalysisManager,self).__init__()
        self.update_default_config(SelectorTask,group='select')
        
        self.configure(config,**kwargs)

        import pprint

        # Debug aid: dump the fully resolved configuration.
        pprint.pprint(self.config)

        self._like = SummedLikelihood()
        
    
    @property
    def like(self):
        # Composite likelihood over all components.
        return self._like

    @property
    def logLike(self):
        # Underlying logLike object of the composite likelihood.
        return self._like.logLike
        
    def setup_roi(self,**kwargs):
        """Prepare all ROI inputs and build the component analyses.

        Creates the save directory, livetime cube (summing matching cubes
        when config['ltcube'] is a glob), event selection, and source model,
        then builds one BinnedGtlike per entry of config['joint'] and adds
        each component's likelihood to self._like.
        """
        target_name = self.config['target']
        
        cat = Catalog.get('2fgl')
        self.src = CatalogSource(cat.get_source_by_name(target_name))

        
        # Default the output directory to the target name.
        if self.config['savedir'] is None:
            self.set_config('savedir',target_name)

        if not os.path.exists(self.config['savedir']):
            os.makedirs(self.config['savedir'])
        
        config = self.config

        # Derived output file paths, all rooted at savedir.
        self.savestate = os.path.join(config['savedir'],
                                    "%s_savestate.P"%target_name)
        
        self.ft1file = os.path.join(config['savedir'],
                                    "%s_ft1.fits"%target_name)

        
            
        self.binfile = os.path.join(config['savedir'],
                                    "%s_binfile.fits"%target_name)
        self.srcmdl = os.path.join(config['savedir'],
                                   "%s_srcmdl.xml"%target_name)
        
        self.srcmdl_fit = os.path.join(config['savedir'],
                                       "%s_srcmdl_fit.xml"%target_name)
        

        # Use the ltcube directly when it is an existing FITS file;
        # otherwise treat the setting as a glob and sum the matches.
        if os.path.isfile(config['ltcube']) and \
                re.search('\.fits?',config['ltcube']):
            self.ltcube = config['ltcube']
        else:
            ltcube = sorted(glob.glob(config['ltcube']))

            
            self.ltcube = os.path.join(config['savedir'],
                                       "%s_ltcube.fits"%target_name)

            lt_task = LTSumTask(self.ltcube,infile1=ltcube,
                                config=config)

            lt_task.run()

        
        self.evfile = config['evfile']#sorted(glob.glob(config['evfile']))
#        if len(self.evfile) > 1:
#            evfile_list = os.path.join(self.config('savedir'),'evfile.txt')
#            np.savetxt(evfile_list,self.evfile,fmt='%s')
#            self.evfile = os.path.abspath(evfile_list)
#        else:
#            self.evfile = self.evfile[0]
            
#        if len(self.ltfile) > 1:
#            ltfile_list = os.path.join(self.config('savedir'),'ltfile.txt')
#            np.savetxt(ltfile_list,self.ltfile,fmt='%s')
#            self.ltfile = os.path.abspath(ltfile_list)
#        else:
#            self.ltfile = self.ltfile[0]
            
#        print self.evfile
#        print self.ltfile
        
        self.skydir = SkyDir(self.src.ra,self.src.dec)

        # Event selection around the target position.
        sel_task = SelectorTask(self.evfile,self.ft1file,
                                ra=self.src.ra,dec=self.src.dec,
                                config=config['select'],overwrite=False)
        sel_task.run()

        # Write the XML source model for a 5-degree ROI.
        cat.create_roi(self.src.ra,self.src.dec,
                       config['isodiff'],
                       config['galdiff'],                       
                       self.srcmdl,radius=5.0)
        
#        self.setup_pointlike()

        self.components = []
                
        # One BinnedGtlike component per joint-analysis entry; each entry
        # supplies its own event class/type, IRFs and isotropic model.
        for i, t in enumerate(self.config['joint']):

            print 'Setting up binned analysis ', i

#            kw = dict(irfs=None,isodiff=None)
#            kw.update(t)
            
            analysis = BinnedGtlike(self.src,
                                    target_name + '_%02i'%(i),
                                    config,
                                    evfile=self.ft1file,
                                    srcmdl=self.srcmdl,
                                    gtselect=dict(evclass=t['evclass'],
                                                  evtype=t['evtype']),
#                                    convtype=t['convtype'],
                                    irfs=t['irfs'],
                                    isodiff=t['isodiff'])

            analysis.setup_inputs()
            analysis.setup_gtlike()
            
            self.components.append(analysis)
            self._like.addComponent(analysis.like)

#        for i, p in self.tied_pars.iteritems():
#            print 'Tying parameters ', i, p            
#            self.comp_like.tieParameters(p)

        self._like.energies = self.components[0].like.energies
            
        # NOTE(review): unconditional return — everything below in this
        # method is unreachable (parameter tying left disabled).
        return
            
        for i, p in enumerate(self.components[0].like.params()):

            print i, p.srcName, p.getName()

            tied_params = []            
            for c in self.components:
                tied_params.append([c.like,p.srcName,p.getName()])
            self.comp_like.tieParameters(tied_params)
                
#        self.tied_pars = {}
#        for x in self.components:
        
#            for s in x.like.sourceNames():
#                p = x.like.normPar(s)                
#                pidx = x.like.par_index(s,p.getName())

#                if not pidx in self.tied_pars:
#                    self.tied_pars[pidx] = []

#                self.tied_pars[pidx].append([x.like,s,p.getName()])
                    
#                print s, p.getName()        
#                self.norm_pars.append([x.like,s,p.getName()])
#            self.norm_pars.append([self.analysis1.like,src,p.getName()])

    def fit(self):
        """Fit the composite likelihood and pretty-print the target's
        source dictionary."""
        saved_state = LikelihoodState(self.like)
        
        print 'Fitting model'
        self.like.fit(verbosity=2, covar=True)

        source_dict = gtlike_source_dict(self.like,self.src.name) 

        import pprint
        pprint.pprint(source_dict)

    def write_xml_model(self):        
        """Write each component's fitted XML model."""
        for c in self.components:
            c.write_model()
#            c.make_srcmodel()

    def make_source_model(self):
        """Regenerate each component's source model."""
        for c in self.components:
            c.make_srcmodel()
            
#    def gtlike_results(self, **kwargs):
#        from lande.fermi.likelihood.save import source_dict
#        return source_dict(self.like, self.name, **kwargs)

#    def gtlike_summary(self):
#        from lande.fermi.likelihood.printing import gtlike_summary
#        return gtlike_summary(self.like,maxdist=self.config['radius'])
        
    def free_source(self,name,free=True):
        """ Free a source in the ROI 
            source : string or pointlike source object
            free   : boolean to free or fix parameter
        """
        freePars = self.like.freePars(name)
        normPar = self.like.normPar(name).getName()
        idx = self.like.par_index(name, normPar)
        if not free:
            # Fix every currently-free parameter of the source.
            self.like.setFreeFlag(name, freePars, False)
        else:
            # Free only the normalization parameter.
            self.like[idx].setFree(True)
        self.like.syncSrcParams(name)
        
    def save(self):
        """Pickle this object to self.savestate."""
        from util import save_object
        save_object(self,self.savestate)
            
    def setup_pointlike(self):
        """Build a pointlike ROI and write the source model XML.

        No-op when self.srcmdl already exists.
        """
        if os.path.isfile(self.srcmdl): return
        
        config = self.config
        
        self._ds = DataSpecification(ft1files = self.ft1file,
                                     ft2files = config['scfile'],
                                     ltcube   = self.ltcube,
                                     binfile  = self.binfile)

        
        self._sa = SpectralAnalysis(self._ds,
                                    binsperdec = config['binsperdec'],
                                    emin       = config['emin'],
                                    emax       = config['emax'],
                                    irf        = config['irfs'],
                                    roi_dir    = self.skydir,
                                    maxROI     = config['radius'],
                                    minROI     = config['radius'],
                                    zenithcut  = config['zmax'],
                                    event_class= 0,
                                    conv_type  = config['convtype'])

        sources = []
        point_sources, diffuse_sources = [],[]

        galdiff = config['galdiff']        
        isodiff = config['isodiff']

        # Diffuse backgrounds (galactic + isotropic), dropping None entries.
        bkg_sources = self.get_background(galdiff,isodiff)
        sources += filter(None, bkg_sources)
        
        catalog = self.get_catalog(config['catalog'])
        catalogs = filter(None, [catalog])

        # Split into point and diffuse sources for the ROI builder.
        for source in sources:
            if isinstance(source,PointSource): point_sources.append(source)
            else: diffuse_sources.append(source)
        
        self._roi=self._sa.roi(roi_dir=self.skydir,
                               point_sources=point_sources,
                               diffuse_sources=diffuse_sources,
                               catalogs=catalogs,
                               fit_emin=config['emin'], 
                               fit_emax=config['emax'])

        # Create model file
        self._roi.toXML(self.srcmdl,
                        convert_extended=True,
                        expand_env_vars=True)
        
    @staticmethod
    def get_catalog(catalog=None, **kwargs):
        """Return a catalog object for 'PSC3Y', '2FGL' or '1FGL'.

        Passes `catalog` through unchanged when it is None or already a
        SourceCatalog; raises for unknown names.
        """
        if catalog is None or isinstance(catalog,SourceCatalog):
            pass
        elif catalog == 'PSC3Y':
            catalog = Catalog3Y('/u/ki/kadrlica/fermi/catalogs/PSC3Y/gll_psc3yearclean_v1_assoc_v6r1p0.fit',
                                latextdir='/u/ki/kadrlica/fermi/catalogs/PSC3Y/',
                                prune_radius=0,
                                **kwargs)
        elif catalog == '2FGL':
            catalog = Catalog2FGL('/u/ki/kadrlica/fermi/catalogs/2FGL/gll_psc_v08.fit',
                               latextdir='/u/ki/kadrlica/fermi/catalogs/2FGL/Templates/',
                               prune_radius=0,
                               **kwargs)
        elif catalog == "1FGL":
            catalog = FermiCatalog('/u/ki/kadrlica/fermi/catalogs/gll_psc_v02.fit',
                               prune_radius=0,
                               **kwargs)
        else:
            raise Exception("Unknown catalog: %s"%catalog)

        return catalog

    @staticmethod
    def get_background(galdiff=None, isodiff=None, limbdiff=None):
        """ Diffuse backgrounds
        galdiff: Galactic diffuse counts cube fits file
        isodiff: Isotropic diffuse spectral text file
        limbdiff: Limb diffuse counts map fits file
        """
        backgrounds = []

        if galdiff is None: gal=None
        else:
            gfile = os.path.basename(galdiff)
            gal = get_diffuse_source('MapCubeFunction',galdiff,
                                     'PowerLaw',None,
                                     os.path.splitext(gfile)[0])
            gal.smodel.set_default_limits()
            gal.smodel.freeze('index')
        backgrounds.append(gal)

        if isodiff is None: iso=None
        else:
            ifile = os.path.basename(isodiff)
            iso = get_diffuse_source('ConstantValue',None,'FileFunction'
                                     ,isodiff,
                                     os.path.splitext(ifile)[0])
            iso.smodel.set_default_limits()
        backgrounds.append(iso)        

        if limbdiff is None: limb=None
        else:
            lfile = basename(limbdiff)
            dmodel = SpatialMap(limbdiff)
            smodel = PLSuperExpCutoff(norm=3.16e-6,index=0,
                                      cutoff=20.34,b=1,e0=200)
            # NOTE(review): `name` is not defined in this scope — this
            # branch would raise a NameError when limbdiff is provided.
            limb = ExtendedSource(name=name,model=smodel,spatial_model=dmodel)
            for i in range(limb.smodel.npar): limb.smodel.freeze(i)
            backgrounds.append(limb)
        # NOTE(review): `limb` is appended again here, so a provided
        # limbdiff would be added twice and None is appended when absent —
        # looks like a duplicate of the append above; confirm.
        backgrounds.append(limb)

        return backgrounds
Example #8
0
    def processAllObs(self, fix_shape=True, delete_below_ts=None,
                      ul_flux_dflux=0, ul_chi2_ts=None, ul_bayes_ts=4.0,
                      ul_cl=0.95, verbosity=0, emin=0, emax=0,
                      interim_save_filename=None):
        """Compute one light-curve entry for each item in self.obsfiles.

        For every observation (or list of observations, combined via a
        SummedLikelihood) this runs three fits — (1) target fully fixed,
        (2) normalization free, (3) all target parameters free — plus an
        N-sigma profile likelihood around the stored normalization, and
        appends the resulting dictionary to self.lc.

        Parameters
        ----------
        fix_shape : bool
            Fix all non-normalization parameters of the other sources.
        delete_below_ts : float or None
            If set, delete free point sources with TS below this value.
        ul_flux_dflux : float
            flux/dflux threshold below which a chi2 upper limit is computed.
        ul_chi2_ts, ul_bayes_ts : float or None
            TS thresholds triggering chi2 / Bayesian upper limits.
        ul_cl : float
            Confidence level for upper limits.
        verbosity : int
            Diagnostic output level (printed with Python 2 print statements).
        emin, emax : float
            Energy range; 0 means take it from the ROI cuts of the data.
        interim_save_filename : str or None
            If set, save processed observations after each entry.
        """
        self.logger.info("Processing all observations.")
        for f in self.obsfiles:
            # Per-entry result dictionary; starts with a record of the
            # configuration used for this run.
            lc = dict()
            lc['config'] = dict()
            lc['config']['fix_shape']       = fix_shape
            lc['config']['delete_below_ts'] = delete_below_ts
            lc['config']['ul_flux_dflux']   = ul_flux_dflux
            lc['config']['ul_chi2_ts']      = ul_chi2_ts
            lc['config']['ul_bayes_ts']     = ul_bayes_ts
            lc['config']['ul_cl']           = ul_cl
            lc['config']['emin']            = emin
            lc['config']['emax']            = emax
            lc['config']['files']           = f
            #lc['config']['argv']            = sys.argv

            lc['e_min'] = emin;
            lc['e_max'] = emax;

            # Single file: one likelihood; list of files: summed likelihood
            # with time/energy ranges merged across components.
            if type(f) != list:
                [ obs, like ] = self.loadObs(f,verbosity)
                lc['t_min'] = obs.roiCuts().minTime()
                lc['t_max'] = obs.roiCuts().maxTime()
                if (emin == 0 or emax == 0):
                    lc['e_min'] = obs.roiCuts().getEnergyCuts()[0];
                    lc['e_max'] = obs.roiCuts().getEnergyCuts()[1];

            else:
                lc['t_min'] = None
                lc['t_max'] = None
                like = SL.SummedLikelihood(self.optimizer)
                for ff in f:
                    [ obs, like1 ] = self.loadObs(ff,verbosity)
                    tmin = obs.roiCuts().minTime()
                    tmax = obs.roiCuts().maxTime()
                    if lc['t_min'] == None or tmin<lc['t_min']:
                        lc['t_min'] = tmin
                    if lc['t_max'] == None or tmax>lc['t_max']:
                        lc['t_max'] = tmax
                    if (lc['e_min'] == 0 or lc['e_max'] == 0):
                        ecuts = obs.roiCuts().getEnergyCuts()
                        lc['e_min'] = ecuts[0]
                        lc['e_max'] = ecuts[1]
                    elif (emin == 0 or emax == 0):
                        ecuts = obs.roiCuts().getEnergyCuts()
                        lc['e_min'] = max(lc['e_min'], ecuts[0])
                        lc['e_max'] = min(lc['e_max'], ecuts[1])
                    like.addComponent(like1)

            # NOTE(review): emin/emax are overwritten here, so values
            # derived from this entry's ROI cuts carry over to subsequent
            # entries of the loop — confirm intended.
            emin = lc['e_min']
            emax = lc['e_max']

            # Tighten the optimizer tolerance for these fits.
            like.tol = like.tol*0.01;

            if verbosity > 1:
                print '- Time:',lc['t_min'],'to',lc['t_max']

            src = like[self.likelihoodConf['sourcename']]
            if src == None:
                raise NameError("No source \""+self.likelihoodConf['sourcename']+"\" in model "+
                                self.model)
            srcfreepar=like.freePars(self.likelihoodConf['sourcename'])
            srcnormpar=like.normPar(self.likelihoodConf['sourcename'])
            # Fix all of the target's parameters for the first fit.
            if len(srcfreepar)>0:
                like.setFreeFlag(self.likelihoodConf['sourcename'], srcfreepar, 0)
                like.syncSrcParams(self.likelihoodConf['sourcename'])


            # The profile-likelihood scan is centered on the stored value
            # and scaled by the stored error, so a prior global fit with
            # errors is required.
            meanvalue = srcnormpar.getValue()
            meanerror = srcnormpar.error()
            if meanerror == 0:
                self.logger.critical("The error on the normalization for your source is 0!  You need to do a global fit first (with quickLike) and provide the final XML file (<basename>_likeMinuit.xml) with errors included before you run compute.")
                return

            lc['original']=dict()
            lc['original']['normpar_init_value'] = meanvalue
            lc['original']['normpar_name'] = srcnormpar.getName()
            lc['original']['nfree'] = len(like.freePars(self.likelihoodConf['sourcename']))
            lc['original']['flux'] = like[self.likelihoodConf['sourcename']].flux(emin, emax)
            lc['original']['logL'] = like.logLike.value()
            if verbosity > 1:
                print '- Original log Like:',lc['original']['logL']

            if fix_shape:
                if verbosity > 1:
                    print '- Fixing spectral shape parameters'
                # Fix every free non-normalization parameter of every other
                # source, syncing each source once all its params are set.
                sync_name = ""
                for p in like.params():
                    if sync_name != "" and sync_name != p.srcName:
                        like.syncSrcParams(sync_name)
                        sync_name = ""
                    if(p.isFree() and p.srcName!=self.likelihoodConf['sourcename'] and
                       p.getName()!=like.normPar(p.srcName).getName()):
                        if verbosity > 2:
                            print '-- '+p.srcName+'.'+p.getName()
                        p.setFree(False)
                        sync_name = p.srcName
                if sync_name != "" and sync_name != p.srcName:
                    like.syncSrcParams(sync_name)
                    sync_name = ""

           # ----------------------------- FIT 1 -----------------------------

            if verbosity > 1:
                print '- Fit 1 - All parameters of',self.likelihoodConf['sourcename'],'fixed'
            like.fit(max(verbosity-3, 0))

            lc['allfixed'] = dict()
            lc['allfixed']['logL'] = like.logLike.value()
            fitstat = like.optObject.getRetCode()
            if verbosity > 1 and fitstat != 0:
                print "- Fit 1 - Minimizer returned with code: ", fitstat
            lc['allfixed']['fitstat'] = fitstat
            if verbosity > 1:
                print '- Fit 1 - log Like:',lc['allfixed']['logL']

            if delete_below_ts:
                # Prune weak free point sources and refit.
                frozensrc = []
                if verbosity > 1:
                    print '- Deleting point sources with TS<'+str(delete_below_ts)
                deletesrc = []
                for s in like.sourceNames():
                    freepars = like.freePars(s)
                    if(s!=self.likelihoodConf['sourcename'] and like[s].src.getType() == 'Point'
                       and len(freepars)>0):
                        ts = like.Ts(s)
                        if ts<delete_below_ts:
                            deletesrc.append(s)
                            if verbosity > 2:
                                print '--',s,'(TS='+str(ts)+')'
                if deletesrc:
                    for s in deletesrc:
                        like.deleteSource(s)
                    if verbosity > 1:
                        print '- Fit 1 - refitting model'
                    like.fit(max(verbosity-3, 0))
                    lc['allfixed']['fitstat_initial'] = \
                        lc['allfixed']['fitstat']
                    fitstat = like.optObject.getRetCode()
                    if verbosity > 1 and fitstat != 0:
                        print "- Fit 1 - Minimizer returned with code: ",\
                            fitstat
                    lc['allfixed']['fitstat'] = fitstat
                    lc['allfixed']['logL'] = like.logLike.value()
                    if verbosity > 1:
                        print '- Fit 1 - log Like:',lc['allfixed']['logL']


            # Record the target's spectrum parameters after fit 1.
            lc['allfixed']['flux']=like[self.likelihoodConf['sourcename']].flux(emin, emax)
            pars = dict()
            for pn in like[self.likelihoodConf['sourcename']].funcs['Spectrum'].paramNames:
                p = like[self.likelihoodConf['sourcename']].funcs['Spectrum'].getParam(pn)
                pars[p.getName()] = dict(name      = p.getName(),
                                         value     = p.getTrueValue(),
                                         error     = p.error()*p.getScale(),
                                         free      = p.isFree())
            lc['allfixed']['pars'] = pars
    

            # ------------------ N SIGMA PROFILE LIKELIHOOD -------------------

            prof_sigma = (-1,-0.5,0,0.5,1.0)
            lc['profile'] = dict();
            lc['profile']['sigma'] = []
            lc['profile']['value'] = []
            lc['profile']['logL'] = []
            lc['profile']['flux'] = []
            lc['profile']['fitstat'] = []

            if verbosity > 1:
                print '- Fit 1 - generating %d point likelihood profile'%\
                      len(prof_sigma)
            for sigma in prof_sigma:
                # Clamp to the parameter's lower bound and skip duplicate
                # scan points produced by the clamping.
                val = sigma*meanerror+meanvalue
                if val < srcnormpar.getBounds()[0]:
                    val = srcnormpar.getBounds()[0]
                if (lc['profile']['value']
                    and lc['profile']['value'][-1]==val):
                    continue
                lc['profile']['value'].append(val)
                lc['profile']['sigma'].append((val-meanvalue)/meanerror)
                if(val == meanvalue):
                    # Reuse the fit-1 result for the central point.
                    lc['profile']['logL'].append(lc['allfixed']['logL'])
                    lc['profile']['flux'].append(lc['allfixed']['flux'])
                else:
                    srcnormpar.setValue(val)
                    like.syncSrcParams(self.likelihoodConf['sourcename'])
                    like.fit(max(verbosity-3, 0))
                    fitstat = like.optObject.getRetCode()
                    if verbosity > 2 and fitstat != 0:
                        print "- Fit 1 - profile: Minimizer returned code: ",\
                            fitstat
                    lc['profile']['fitstat'].append(fitstat)
                    lc['profile']['logL'].append(like.logLike.value())
                    lc['profile']['flux'].append(like[self.likelihoodConf['sourcename']].\
                                              flux(emin, emax))
                if verbosity > 2:
                    print '- Fit 1 - profile: %+g, %f -> %f'%\
                          (sigma,lc['profile']['value'][-1],
                           lc['profile']['logL'][-1]-lc['allfixed']['logL'])

            # Restore the normalization before the next fit.
            srcnormpar.setValue(meanvalue)
            like.syncSrcParams(self.likelihoodConf['sourcename'])

            # ----------------------------- FIT 2 -----------------------------

            if verbosity > 1:
                print '- Fit 2 - Normalization parameter of',\
                      self.likelihoodConf['sourcename'],'free'
            srcnormpar.setFree(1)
            like.syncSrcParams(self.likelihoodConf['sourcename'])
            like.fit(max(verbosity-3, 0))
            lc['normfree'] = dict()
            fitstat = like.optObject.getRetCode()
            if verbosity > 1 and fitstat != 0:
                print "- Fit 2 - Minimizer returned with code: ", fitstat
            lc['normfree']['fitstat'] = fitstat
            lc['normfree']['logL'] = like.logLike.value()
            lc['normfree']['ts'] = like.Ts(self.likelihoodConf['sourcename'])
            lc['normfree']['flux_dflux'] = \
                srcnormpar.getValue()/srcnormpar.error()
            if verbosity > 1:
                print '- Fit 2 - log Like:',lc['normfree']['logL'],\
                      '(TS='+str(lc['normfree']['ts'])+')'

            lc['normfree']['nfree']=len(like.freePars(self.likelihoodConf['sourcename']))
            lc['normfree']['flux']=like[self.likelihoodConf['sourcename']].flux(emin, emax)
            pars = dict()
            for pn in like[self.likelihoodConf['sourcename']].funcs['Spectrum'].paramNames:
                p = like[self.likelihoodConf['sourcename']].funcs['Spectrum'].getParam(pn)
                pars[p.getName()] = dict(name      = p.getName(),
                                         value     = p.getTrueValue(),
                                         error     = p.error()*p.getScale(),
                                         free      = p.isFree())
            lc['normfree']['pars'] = pars
            # Upper limit: Bayesian when TS is below ul_bayes_ts, otherwise
            # chi2-based when the flux significance or TS is low.
            ul_type = None
            if ul_bayes_ts != None and lc['normfree']['ts'] < ul_bayes_ts:

                ul_type = 'bayesian'
                [ul_flux, ul_results] = \
                    IUL.calc_int(like,self.likelihoodConf['sourcename'],cl=ul_cl,
                                                skip_global_opt=True,
                                                verbosity = max(verbosity-2,0),
                                                emin=emin, emax=emax,
                                            poi_values = lc['profile']['value'])
            elif ( ul_flux_dflux != None and \
                   lc['normfree']['flux_dflux'] < ul_flux_dflux ) or \
                   ( ul_chi2_ts != None and lc['normfree']['ts'] < ul_chi2_ts):
                ul_type = 'chi2'
                [ul_flux, ul_results] = \
                    IUL.calc_chi2(like,self.likelihoodConf['sourcename'],cl=ul_cl,
                                                 skip_global_opt=True,
                                                 verbosity = max(verbosity-2,0),
                                                 emin=emin, emax=emax)
            if ul_type != None:
                lc['normfree']['ul'] = dict(flux    = ul_flux,
                                            results = ul_results,
                                            type    = ul_type)

            # ----------------------------- FIT 3 -----------------------------

            if verbosity > 1:
                print '- Fit 3 - All parameters of',self.likelihoodConf['sourcename'],'free'
            like.setFreeFlag(self.likelihoodConf['sourcename'], srcfreepar, 1)
            like.syncSrcParams(self.likelihoodConf['sourcename'])
            like.fit(max(verbosity-3, 0))
            lc['allfree'] = dict()
            fitstat = like.optObject.getRetCode()
            if verbosity > 1 and fitstat != 0:
                print "- Fit 3 - Minimizer returned with code: ", fitstat
            lc['allfree']['fitstat'] = fitstat
            lc['allfree']['logL'] = like.logLike.value()
            lc['allfree']['ts'] = like.Ts(self.likelihoodConf['sourcename'])
            if verbosity > 1:
                print '- Fit 3 - log Like:',lc['allfree']['logL'],\
                      '(TS='+str(lc['allfree']['ts'])+')'
            lc['allfree']['nfree']=len(like.freePars(self.likelihoodConf['sourcename']))
            lc['allfree']['flux']=like[self.likelihoodConf['sourcename']].flux(emin, emax)
            pars = dict()
            for pn in like[self.likelihoodConf['sourcename']].funcs['Spectrum'].paramNames:
                p = like[self.likelihoodConf['sourcename']].funcs['Spectrum'].getParam(pn)
                pars[p.getName()] = dict(name      = p.getName(),
                                         value     = p.getTrueValue(),
                                         error     = p.error()*p.getScale(),
                                         free      = p.isFree())
            lc['allfree']['pars'] = pars

            self.lc.append(lc)
            if interim_save_filename != None:
                self.saveProcessedObs(interim_save_filename)
Example #9
0
def GenAnalysisObjects(config, verbose=1, xmlfile=""):
    """Build the per-component Analysis objects and a single SummedLikelihood.

    Depending on the 'ComponentAnalysis' section of the configuration the
    analysis is split in event-type components (Front/Back, PSF0-3 or
    EDISP0-3) and, optionally, each component is further split into a binned
    (low-energy) and an unbinned (high-energy) part at EUnBinned.  Every
    component contributes one (Un)BinnedAnalysis object to the summed
    likelihood.

    Parameters
    ----------
    config : enrico configuration object; temporarily mutated per component
        (evtype, xml file name, energy bounds) and restored where possible.
    verbose : int
        Verbosity level forwarded to Analysis.
    xmlfile : str
        Kept for interface compatibility (see commented-out override below).

    Returns
    -------
    (FitRunner, Fit) : the last Analysis instance created and the
        SummedLikelihood holding all components.
    """
    mes = Loggin.Message()
    #check is the summed likelihood method should be used and get the
    #Analysis objects (observation and (Un)BinnedAnalysis objects)
    folder = config['out']

    # One summed likelihood; every component below is added to it.
    Fit = SummedLikelihood.SummedLikelihood()

    EUnBinned = config['ComponentAnalysis']['EUnBinned']
    emintotal = float(config['energy']['emin'])
    emaxtotal = float(config['energy']['emax'])

    evtnum = [config["event"]["evtype"]]  #for std analysis
    evtold = evtnum[0]  #for std analysis

    # Create one obs instance for each component
    if isKey(config['ComponentAnalysis'], 'FrontBack') == 'yes':
        evtnum = [1, 2]
    if isKey(config['ComponentAnalysis'], 'PSF') == 'yes':
        evtnum = [4, 8, 16, 32]
    if isKey(config['ComponentAnalysis'], 'EDISP') == 'yes':
        # EDISP0-3 event-type bitmask values; 512 (not 521) is EDISP3.
        evtnum = [64, 128, 256, 512]
    oldxml = config['file']['xml']
    for k, evt in enumerate(evtnum):
        config['event']['evtype'] = evt
        config["file"]["xml"] = oldxml.replace(
            ".xml", "_" + typeirfs[evt] + ".xml").replace("_.xml", ".xml")

        if EUnBinned > emintotal and EUnBinned < emaxtotal:
            mes.info(
                "Breaking the analysis in Binned (low energy) and Unbinned (high energies)"
            )
            analysestorun = ["lowE", "highE"]

            # Plain loop: the previous enumerate index shadowed the outer
            # loop variable 'k' and was never used.
            for TYPE in analysestorun:
                tag = TYPE
                if typeirfs[evt] != "":
                    tag += "_" + typeirfs[evt]  # handle name of fits file

                # Tune parameters ('==' rather than 'is': string identity
                # comparison is interning-dependent).
                if TYPE == "lowE":
                    config['energy']['emin'] = emintotal
                    config['energy']['emax'] = min(config['energy']['emax'],
                                                   EUnBinned)
                    config['analysis']['likelihood'] = "binned"
                    config['analysis']['ComputeDiffrsp'] = "no"
                elif TYPE == "highE":
                    config['energy']['emin'] = max(config['energy']['emin'],
                                                   EUnBinned)
                    config['energy']['emax'] = emaxtotal
                    config['analysis']['likelihood'] = "unbinned"
                    config['analysis']['ComputeDiffrsp'] = "yes"

                Analyse = Analysis(folder, config, \
                    configgeneric=config,\
                    tag=TYPE,\
                    verbose=verbose)

                Fit_component = Analyse.CreateLikeObject()
                Fit.addComponent(Fit_component)
            FitRunner = Analyse
            FitRunner.obs.Emin = emintotal
            FitRunner.obs.Emax = emaxtotal
            # Restore the full energy range so the next event-type component
            # does not inherit the mutated (possibly empty) lowE/highE bounds.
            config['energy']['emin'] = emintotal
            config['energy']['emax'] = emaxtotal

        else:
            Analyse = Analysis(folder, config, \
                configgeneric=config,\
                tag=typeirfs[evt], verbose = verbose)

            # if not(xmlfile ==""): Analyse.obs.xmlfile = xmlfile
            Fit_component = Analyse.CreateLikeObject()
            Fit.addComponent(Fit_component)
    FitRunner = Analyse

    config["event"]["evtype"] = evtold
    FitRunner.config = config

    return FitRunner, Fit
Example #10
0
def likelihood_upper_limit3(z):
    """Compute flux upper limits for a box-shaped spectral component.

    For each candidate upper-edge energy bin (indices 6..47 of a fixed
    6 GeV - 800 GeV log grid), the counts inside a sliding energy window
    around the box edge are excluded from the background fit (data below
    and above the window are fitted separately and summed), the window
    counts are then Poisson-fluctuated from the fitted model, and the box
    flux is increased in ever finer multiplicative steps until
    2*delta(logL) exceeds 2.706 (one-sided ~95% CL for one parameter).

    ``z`` sets the box width via width = 2*E*sqrt(1-z)/(1+sqrt(1-z));
    presumably z in [0, 1] with z -> 1 giving a narrow (line-like) box —
    confirm against the caller.

    Relies on module-level names (np, pyfits, os, BinnedObs,
    BinnedAnalysis, SummedLikelihood, pyLike, loglikelihood3,
    edit_box_xml, make_random, plot_spectrum) and on fixed input files
    ('6gev_*.fits', xml models) in the working directory.

    Returns
    -------
    box_flux : ndarray of per-bin flux upper limits (zeros where not scanned).
    """

    #Array to hold results of flux upper limit calculation
    num_ebins = 51  #1 more than the number of bins due to the fencepost problem
    energies = 10**np.linspace(np.log10(6000), np.log10(800000), num_ebins)
    ebin_widths = np.diff(energies)

    sourcemap = '6gev_srcmap_03_pedestal.fits'  #'box_srcmap_artificial_box.fits'#'6gev_srcmap_complete.fits'
    box_flux = np.zeros((num_ebins - 1))
    box_flux_bayesian = np.zeros((num_ebins - 1))
    box_flux_frequentist = np.zeros((num_ebins - 1))

    # Correlation coefficients (box normalization vs GC prefactor/index),
    # filled only when calc_cov is enabled below.
    corr = np.zeros((num_ebins - 1))
    corr2 = np.zeros((num_ebins - 1))

    gll_index = np.zeros((num_ebins - 1))
    disk_index = np.zeros((num_ebins - 1))
    #reconstructed_spectra = np.zeros((num_ebins-1, num_ebins-1))
    #Loop through upper edge of box
    for index in range(6, 48):
        box_width = energies[index] * 2.0 * np.sqrt(1.0 -
                                                    z) / (1 + np.sqrt(1.0 - z))

        print "Calculating upper limit in bin " + str(
            index) + " at energy " + str(energies[index])
        #print "bin " + str(np.argmin(np.abs(energies-energy)))
        #window_low, window_high = window(energy, energies)

        # Exclusion window: 6 bins below the edge, 2 above (hard-coded).
        window_low = index - 6
        window_high = index + 2

        #Generate two observations (one above the window and one below)
        #Make two exposure maps
        if index > 6:
            exposure_complete = pyfits.open('6gev_exposure.fits')
            exposure_complete[0].data = exposure_complete[0].data[:window_low +
                                                                  1]
            a = exposure_complete[0]
            exposure_complete[1].data = exposure_complete[1].data[:window_low +
                                                                  1]
            b = exposure_complete[1]
            hdulist = pyfits.HDUList([a, b, exposure_complete[2]])
            os.system('rm exposure_low.fits')
            hdulist.writeto('exposure_low.fits')
            exposure_complete.close()
        if index < 48:
            exposure_complete = pyfits.open('6gev_exposure.fits')
            exposure_complete[0].data = exposure_complete[0].data[window_high:]
            a = exposure_complete[0]
            exposure_complete[1].data = exposure_complete[1].data[window_high:]
            b = exposure_complete[1]
            hdulist = pyfits.HDUList([a, b, exposure_complete[2]])
            os.system('rm exposure_high.fits')
            hdulist.writeto('exposure_high.fits')
            exposure_complete.close()

        #Make two sourcemaps
        # HDU layout assumed: 0 counts cube, 2 energy table, 3-9 per-source
        # model maps (one extra plane vs counts, hence the +1 slices).
        if index > 6:
            srcmap_complete = pyfits.open(sourcemap)
            srcmap_complete[0].data = srcmap_complete[0].data[:window_low]
            a = srcmap_complete[0]
            srcmap_complete[2].data = srcmap_complete[2].data[:window_low]
            b = srcmap_complete[2]
            srcmap_complete[3].data = srcmap_complete[3].data[:window_low + 1]
            c = srcmap_complete[3]
            srcmap_complete[4].data = srcmap_complete[4].data[:window_low + 1]
            d = srcmap_complete[4]
            srcmap_complete[5].data = srcmap_complete[5].data[:window_low + 1]
            e = srcmap_complete[5]
            srcmap_complete[6].data = srcmap_complete[6].data[:window_low + 1]
            f = srcmap_complete[6]
            srcmap_complete[7].data = srcmap_complete[7].data[:window_low + 1]
            g = srcmap_complete[7]
            srcmap_complete[8].data = srcmap_complete[8].data[:window_low + 1]
            h = srcmap_complete[8]
            srcmap_complete[9].data = srcmap_complete[9].data[:window_low + 1]
            m = srcmap_complete[9]

            os.system('rm srcmap_low.fits')
            # NOTE(review): str() + ':' + str() is just ":" — this clobbers
            # the DSVAL4 energy-range keyword with an empty range; confirm
            # whether real energy bounds were intended here.
            b.header['DSVAL4'] = str() + ':' + str()
            hdulist = pyfits.HDUList(
                [a, srcmap_complete[1], b, c, d, e, f, g, h, m])
            hdulist.writeto('srcmap_low.fits')
            srcmap_complete.close()

        if index < 48:
            srcmap_complete = pyfits.open(sourcemap)
            srcmap_complete[0].data = srcmap_complete[0].data[window_high:]
            a = srcmap_complete[0]
            srcmap_complete[2].data = srcmap_complete[2].data[window_high:]
            r = 0
            # Re-index the energy-bin table rows from 0 after the cut.
            for entry in srcmap_complete[2].data:
                entry[0] = int(r)
                r += 1
            #srcmap_complete[2].data[:,0] = np.arange(0, len(srcmap_complete[2].data[:,0]))
            b = srcmap_complete[2]
            srcmap_complete[3].data = srcmap_complete[3].data[window_high:]
            c = srcmap_complete[3]
            srcmap_complete[4].data = srcmap_complete[4].data[window_high:]
            d = srcmap_complete[4]
            srcmap_complete[5].data = srcmap_complete[5].data[window_high:]
            e = srcmap_complete[5]
            srcmap_complete[6].data = srcmap_complete[6].data[window_high:]
            f = srcmap_complete[6]
            srcmap_complete[7].data = srcmap_complete[7].data[window_high:]
            g = srcmap_complete[7]
            srcmap_complete[8].data = srcmap_complete[8].data[window_high:]
            h = srcmap_complete[8]
            srcmap_complete[9].data = srcmap_complete[9].data[window_high:]
            m = srcmap_complete[9]

            os.system('rm srcmap_high.fits')
            hdulist = pyfits.HDUList(
                [a, srcmap_complete[1], b, c, d, e, f, g, h, m])
            hdulist.writeto('srcmap_high.fits')
            srcmap_complete.close()

        # Fit the background model on the data outside the window only
        # (low-side + high-side components summed).
        summedLike = SummedLikelihood()

        if index > 6:
            obs_low = BinnedObs(srcMaps='srcmap_low.fits',
                                expCube='6gev_ltcube.fits',
                                binnedExpMap='exposure_low.fits',
                                irfs='CALDB')
            like_low = BinnedAnalysis(obs_low,
                                      'xmlmodel_free.xml',
                                      optimizer='NEWMINUIT')
            summedLike.addComponent(like_low)

        if index < 48:
            obs_high = BinnedObs(srcMaps='srcmap_high.fits',
                                 expCube='6gev_ltcube.fits',
                                 binnedExpMap='exposure_high.fits',
                                 irfs='CALDB')
            like_high = BinnedAnalysis(obs_high,
                                       'xmlmodel_free.xml',
                                       optimizer='NEWMINUIT')
            summedLike.addComponent(like_high)

        print "Fitting SummedLikelihood"
        summedLike.ftol = 1e-8
        summedLike.fit(verbosity=3)
        summedLike.writeXml('xmlmodel_free.xml')
        # Freeze every parameter at the sideband best fit for the box scan.
        for k in range(len(summedLike.params())):
            summedLike.freeze(k)
        summedLike.writeXml('xmlmodel_fixed.xml')

        print "Fitting all data"

        calculation = 'poisson'
        obs_complete = BinnedObs(srcMaps=sourcemap,
                                 expCube='6gev_ltcube.fits',
                                 binnedExpMap='6gev_exposure.fits',
                                 irfs='CALDB')
        # Insert a (negligible-flux) box into the fixed model.
        edit_box_xml(100000.0, 1e-15, 0.0)
        like = BinnedAnalysis(obs_complete,
                              'xmlmodel_fixed_box.xml',
                              optimizer='MINUIT')
        like.tol = 1e-8
        like_obj = pyLike.Minuit(like.logLike)
        like.fit(verbosity=3, optObject=like_obj)
        like.writeXml('xmlmodel_fixed_box.xml')

        #Flucuate the window data
        # NOTE(review): 'bin' shadows the builtin; the model_counts /
        # num_photons assignments sit inside the 'for j' loop but are only
        # meaningful for the matched HDU — verify the intended indentation.
        f = pyfits.open(sourcemap)
        poisson_data = np.zeros(
            (len(range(max(window_low, 0), min(window_high, 49))), 50, 50))
        q = 0
        for bin in range(max(window_low, 0), min(window_high, 49)):
            for source in like.sourceNames():
                for j in range(3, 10):
                    if source == f[j].header['EXTNAME']:
                        the_index = j
                    model_counts = np.zeros(
                        (1, len(f[the_index].data[bin].ravel())))[0]
                    num_photons = int(
                        np.round(np.random.poisson(
                            like._srcCnts(source)[bin])))  #
                # Draw each photon's pixel from the normalized model map.
                for photon in range(int(num_photons)):
                    phot_loc = int(
                        make_random(np.arange(0, len(model_counts), 1),
                                    f[the_index].data[bin].ravel()))
                    model_counts[phot_loc] += 1
                model_counts = model_counts.reshape(50, 50)
                poisson_data[q] += model_counts
            q += 1
        f[0].data[max(window_low, 0):min(window_high, 49)] = poisson_data
        os.system('rm box_srcmap_poisson.fits')
        f.writeto('box_srcmap_poisson.fits')
        f.close()

        obs_poisson = BinnedObs(srcMaps='box_srcmap_poisson.fits',
                                expCube='6gev_ltcube.fits',
                                binnedExpMap='6gev_exposure.fits',
                                irfs='CALDB')
        like = BinnedAnalysis(obs_poisson,
                              'xmlmodel_fixed_box.xml',
                              optimizer='NEWMINUIT')
        like.tol = 1e-8
        like_obj = pyLike.Minuit(like.logLike)
        like.fit(verbosity=0, optObject=like_obj)

        if calculation == 'complete':
            obs_calculation = obs_complete
        else:
            obs_calculation = obs_poisson

        print "Finding Upper Limit..."
        null_likelihood = loglikelihood3(energies[index], 1.0e-15,
                                         obs_calculation, z)
        delta_loglike = 0.0

        #increase box flux until likelihood > 2sigma over null likelihood
        # Coarse-to-fine bracketing: multiply the flux by 3.0, then 1.5,
        # then 1.03, backing off one step each time the threshold is crossed.
        box_flux[index] = 3.e-15
        crit_chi2 = 2.706
        while delta_loglike < crit_chi2:
            print "delta loglike = " + str(delta_loglike)
            print "flux = " + str(box_flux[index]) + " likelihood = " + str(
                loglikelihood3(energies[index], box_flux[index],
                               obs_calculation, z))
            box_flux[index] *= 3.0
            delta_loglike = 2.0 * (loglikelihood3(
                energies[index], box_flux[index], obs_calculation, z) -
                                   null_likelihood)

        box_flux[index] *= 1.0 / 3.0
        delta_loglike = 2.0 * (loglikelihood3(energies[index], box_flux[index],
                                              obs_calculation, z) -
                               null_likelihood)
        print "Delta loglike = " + str(delta_loglike)
        while delta_loglike < crit_chi2:
            print "delta loglike = " + str(delta_loglike)
            print "flux = " + str(box_flux[index]) + " likelihood = " + str(
                loglikelihood3(energies[index], box_flux[index],
                               obs_calculation, z))
            box_flux[index] *= 1.5
            delta_loglike = 2.0 * (loglikelihood3(
                energies[index], box_flux[index], obs_calculation, z) -
                                   null_likelihood)

        box_flux[index] *= 1.0 / 1.5
        delta_loglike = 2.0 * (loglikelihood3(energies[index], box_flux[index],
                                              obs_calculation, z) -
                               null_likelihood)

        while delta_loglike < crit_chi2:
            print "delta loglike = " + str(delta_loglike)
            print "flux = " + str(box_flux[index]) + " likelihood = " + str(
                loglikelihood3(energies[index], box_flux[index],
                               obs_calculation, z))
            box_flux[index] *= 1.03
            delta_loglike = 2.0 * (loglikelihood3(
                energies[index], box_flux[index], obs_calculation, z) -
                                   null_likelihood)

        box_flux[index] *= 1.0 / 1.03
        delta_loglike = 2.0 * (loglikelihood3(energies[index], box_flux[index],
                                              obs_calculation, z) -
                               null_likelihood)

        print "delta log like = " + str(delta_loglike)

        # Optional covariance / correlation study (disabled by default).
        calc_cov = False
        if calc_cov:
            like1 = BinnedAnalysis(obs_calculation,
                                   'xmlmodel_fixed_box.xml',
                                   optimizer='DRMNFB')

            like1.thaw(like1.par_index('Disk Component', 'Index'))
            like1.thaw(like1.par_index('Disk Component', 'Prefactor'))
            like1.thaw(like1.par_index('Box Component', 'Normalization'))
            like1.tol = 1e-5
            like1obj = pyLike.Minuit(like1.logLike)
            like1.fit(verbosity=0, optObject=like1obj, covar=False)

            like1.writeXml('xmlmodel_fixed_box.xml')

            like2 = BinnedAnalysis(obs_calculation,
                                   'xmlmodel_fixed_box.xml',
                                   optimizer='NewMinuit')
            like2.tol = 1e-8
            # NOTE(review): like2obj wraps like1.logLike and the fit below
            # passes optObject=like1obj — both look like copy-paste slips
            # (expected like2.logLike / like2obj); confirm before relying on
            # like2.covariance and the return code printed below.
            like2obj = pyLike.Minuit(like1.logLike)
            like2.fit(verbosity=3, optObject=like1obj, covar=True)

            #ul = UpperLimit(like1,'Box Component')
            #ul.compute(emin=100.0,emax=500000, delta=3.91)

            #box_flux_bayesian[index] = float(ul.bayesianUL()[0])
            #box_flux_frequentist[index] = float(ul.results[0].value)
            print like2.covariance
            print 'Return code: ' + str(like2obj.getRetCode())
            cov = like2.covariance
            corr[index] = cov[0][1] / np.sqrt(cov[0][0] * cov[1][1])
            corr2[index] = cov[0][2] / np.sqrt(cov[0][0] * cov[2][2])
            print "Correlations:"
            print corr[index]
            print corr2[index]
            #if like2obj.getRetCode()!=0:
            plot_spectrum(like2, energies, index, window_low, window_high)

    # Persist and plot the correlation study results (only when enabled;
    # calc_cov here refers to the value left by the final loop iteration).
    if calc_cov:
        file = open('correlation_results.pk1', 'wb')
        pickle.dump([corr, corr2, box_flux_bayesian, box_flux_frequentist],
                    file)
        file.close()

        fig = plt.figure()
        ax = fig.add_subplot(111)
        ax.plot(energies[np.nonzero(corr)],
                corr[np.nonzero(corr)],
                color='blue',
                label='Box vs GC Prefactor')
        ax.plot(energies[np.nonzero(corr2)],
                corr2[np.nonzero(corr2)],
                color='red',
                label='Box vs GC index')
        plt.ylim([-1.0, 1.0])

        plt.legend()
        plt.xscale('log')

        plt.show()

    return box_flux
Example #11
0
def GenAnalysisObjects(config, verbose=1, xmlfile=""):
    """Build per-component Analysis objects and a SummedLikelihood.

    Splits the analysis in Front/Back or PSF0-3 event-type components when
    requested in config['ComponentAnalysis']; each component's likelihood
    object is added to a single SummedLikelihood.  Empty event types (failed
    gtltcube runs) are skipped with a warning.

    Note: this snippet appears truncated here by the example boundary (no
    return statement is visible); the original presumably continues with
    further component cases and a return of (FitRunner, Fit).

    Parameters
    ----------
    config : enrico configuration (ConfigObj-compatible)
    verbose : int, verbosity forwarded to Analysis
    xmlfile : str, optional xml model overriding each component's obs.xmlfile
    """
    import os
    import os.path
    import math
    import SummedLikelihood
    from enrico.xml_model import XmlMaker
    from enrico.extern.configobj import ConfigObj
    from utils import hasKey, isKey
    import Loggin
    mes = Loggin.Message()
    #check is the summed likelihood method should be used and get the
    #Analysis objects (observation and (Un)BinnedAnalysis objects)
    SummedLike = config['Spectrum']['SummedLike']
    folder = config['out']

    # If there is no xml file, create it and print a warning
    if (not os.path.isfile(config['file']['xml'])):
        mes.warning("Xml not found, creating one for the given config %s" %
                    config['file']['xml'])
        XmlMaker(config)

    Fit = SummedLikelihood.SummedLikelihood()
    if hasKey(config, 'ComponentAnalysis') == True:
        # Create one obs instance for each component
        # (pre-sized to the maximum number of components, 4 for PSF).
        configs = [None] * 4
        Fits = [None] * 4
        Analyses = [None] * 4
        if isKey(config['ComponentAnalysis'], 'FrontBack') == 'yes':
            from enrico.data import fermievtypes
            mes.info("Breaking the analysis in Front/Back events")
            # Set Summed Likelihood to True
            config['Spectrum']['SummedLike'] = 'yes'
            oldxml = config['file']['xml']
            for k, TYPE in enumerate(["FRONT", "BACK"]):
                # Clone the config so each component gets its own evtype.
                configs[k] = ConfigObj(config)
                configs[k]['event']['evtype'] = fermievtypes[TYPE]
                try:
                    Analyses[k] = Analysis(folder, configs[k], \
                        configgeneric=config,\
                        tag=TYPE, verbose = verbose)
                    if not (xmlfile == ""): Analyses[k].obs.xmlfile = xmlfile
                    Fits[k] = Analyses[k].CreateLikeObject()
                    Fit.addComponent(Fits[k])
                except RuntimeError, e:
                    # Components with no events make gtltcube fail; warn
                    # and continue with the remaining components.
                    if 'RuntimeError: gtltcube execution failed' in str(e):
                        mes.warning("Event type %s is empty! Error is %s" %
                                    (TYPE, str(e)))
            FitRunner = Analyses[0]

        elif isKey(config['ComponentAnalysis'], 'PSF') == 'yes':
            from enrico.data import fermievtypes
            mes.info("Breaking the analysis in PSF 0,1,2,3.")
            # Clone the configs
            # Set Summed Likelihood to True
            config['Spectrum']['SummedLike'] = 'yes'
            for k, TYPE in enumerate(["PSF0", "PSF1", "PSF2", "PSF3"]):
                configs[k] = ConfigObj(config)
                configs[k]['event']['evtype'] = fermievtypes[TYPE]
                try:
                    Analyses[k] = Analysis(folder, configs[k], \
                        configgeneric=config,\
                        tag=TYPE, verbose = verbose)
                    if not (xmlfile == ""): Analyses[k].obs.xmlfile = xmlfile
                    Fits[k] = Analyses[k].CreateLikeObject()
                    Fit.addComponent(Fits[k])
                except RuntimeError, e:
                    if 'RuntimeError: gtltcube execution failed' in str(e):
                        mes.warning("Event type %s is empty! Error is %s" %
                                    (TYPE, str(e)))
            FitRunner = Analyses[0]
Example #12
0
def GenAnalysisObjects(config, verbose = 1, xmlfile =""):
    """Create the per-component Analysis objects and a SummedLikelihood.

    The analysis is optionally split in event-type components (Front/Back,
    PSF0-3, EDISP0-3) or in up to 15 energy+PSF components following the
    4FGL prescription, and each non-4FGL component may additionally be
    split at EUnBinned into a binned (low-energy) plus unbinned
    (high-energy) part.  Every component contributes one (Un)BinnedAnalysis
    object to a single SummedLikelihood.

    Parameters
    ----------
    config : enrico configuration; temporarily mutated per component
        (evtype, xml name, energy bounds, binning) and restored before
        returning.
    verbose : int, verbosity forwarded to Analysis.
    xmlfile : str, optional xml model overriding each component's
        obs.xmlfile.

    Returns
    -------
    (FitRunner, Fit, ListOfAnalysisObjects) : the last Analysis instance,
        the SummedLikelihood, and the list of all per-component Analysis
        objects (needed to produce the individual residual maps).
    """
    # Array containing the list of analysis objects (needed to produce the individual residual maps)
    ListOfAnalysisObjects = []
    mes = Loggin.Message()
    #check is the summed likelihood method should be used and get the
    #Analysis objects (observation and (Un)BinnedAnalysis objects)
    folder = config['out']

    # If there are no xml files, create it and print a warning <--- Not sure if this is needed here.
    Fit = SummedLikelihood.SummedLikelihood()
    EUnBinned = config['ComponentAnalysis']['EUnBinned']
    emintotal = float(config['energy']['emin'])
    emaxtotal = float(config['energy']['emax'])

    evtnum = [config["event"]["evtype"]] #for std analysis
    evtold = evtnum[0] #for std analysis
 
    # Create one obs instance for each component. 
    # The first 3 can be combined with splitting in energy. The 4th tries to mimick 4FGL.
    if isKey(config['ComponentAnalysis'],'FrontBack') == 'yes':
        evtnum = [1, 2]
        config['analysis']['likelihood'] = "binned"
    elif isKey(config['ComponentAnalysis'],'PSF') == 'yes':
        evtnum = [4,8,16,32]
        config['analysis']['likelihood'] = "binned"
    elif isKey(config['ComponentAnalysis'],'EDISP') == 'yes':
        # EDISP0-3 event-type bitmask values; 512 (not 521) is EDISP3.
        evtnum = [64,128,256,512]
        config['analysis']['likelihood'] = "binned"
    elif isKey(config['ComponentAnalysis'],'FGL4') == 'yes':
        # Special case of the PSF component analysis, 
        # where up to 15 components (energy+PSF) are created following 
        # 4FGL prescription.
        from catalogComponents import evtnum, energybins, nbinsbins, zmaxbins, ringwidths, pixelsizes
        config['analysis']['likelihood'] = "binned"
        oldxml = config['file']['xml']

        bin_i = 0
        roi = 0
        # energybins is a dictionary containing an index and a pair of energies
        for ebin_i in energybins:
            # Restrict the analysis to the specified energy range in all cases.
            if emintotal>=energybins[ebin_i][1]:
                continue
            if emaxtotal<=energybins[ebin_i][0]:
                continue
           
            # ROI size is fixed by the first energy bin actually used.
            if (roi==0): roi = 2.*ringwidths[ebin_i]+4.
            zmax    = zmaxbins[ebin_i]
            nbinsE  = nbinsbins[ebin_i]
            energybin = energybins[ebin_i]
            
            for k,evt in enumerate(evtnum):
                pixel_size = pixelsizes[ebin_i][k]
                # Negative pixel size marks a component excluded by 4FGL.
                if pixel_size<0: continue
                tag     = "{0}_En{1}".format(typeirfs[evt],ebin_i)
                # Approximation, in the 4FGL the core radius changes from src to src!
                mes.info("Breaking the analysis in bins ~ 4FGL")
                config['event']['evtype'] = evt
                config["file"]["xml"] = oldxml.replace(".xml","_")+typeirfs[evt]+"_"+\
                                        "En{0}.xml".format(ebin_i)
                config["energy"]["emin"] = max(emintotal,energybin[0])
                config["energy"]["emax"] = min(emaxtotal,energybin[1])
                config["analysis"]["likelihood"] = "binned"
                config["analysis"]["ComputeDiffrsp"] = "no"
                config["analysis"]["enumbins_per_decade"] = \
                    int(1.*nbinsE/math.log10(energybin[1]/energybin[0])+0.5)
                config["space"]["rad"] = roi
                config["analysis"]["zmax"] = zmax
                
                Analyse = Analysis(folder, config, \
                    configgeneric=config,\
                    tag=tag, verbose=verbose)
                ListOfAnalysisObjects.append(Analyse)
                
                if not(xmlfile ==""): Analyse.obs.xmlfile = xmlfile
                mes.info('Creating Likelihood object for component.')
                Fit_component = Analyse.CreateLikeObject()
                mes.info('Adding component to the summed likelihood.')
                Fit.addComponent(Fit_component)
            
        FitRunner = Analyse
        FitRunner.obs.Emin = emintotal
        FitRunner.obs.Emax = emaxtotal
        # Restore the mutated config before returning.
        config["energy"]["emin"] = emintotal
        config["energy"]["emax"] = emaxtotal
        config["event"]["evtype"] = evtold
        FitRunner.config = config

        return FitRunner,Fit,ListOfAnalysisObjects

    # Standard (non-4FGL) analysis components
    oldxml = config['file']['xml']
    for k,evt in enumerate(evtnum):
        config['event']['evtype'] = evt
        
        if typeirfs[evt] != "":
            config["file"]["xml"] = oldxml.replace(".xml","_"+typeirfs[evt]+".xml")

        if EUnBinned>emintotal and EUnBinned<emaxtotal:
            mes.info("Breaking the analysis in Binned (low energy) and Unbinned (high energies)")
            analysestorun = ["lowE","highE"]

            for j,TYPE in enumerate(analysestorun):
                tag = TYPE
                if typeirfs[evt] != "" : tag += "_"+typeirfs[evt]# handle name of fits file
                config["file"]["xml"] = oldxml.replace(".xml","_"+tag+".xml")

                # Tune parameters ('==' rather than 'is': string identity
                # comparison is interning-dependent).
                if TYPE == "lowE":
                    config['energy']['emin'] = emintotal
                    config['energy']['emax'] = min(config['energy']['emax'],EUnBinned)
                    config['analysis']['likelihood'] = "binned"
                    config['analysis']['ComputeDiffrsp'] = "no"
                elif TYPE == "highE":
                    config['energy']['emin'] = max(config['energy']['emin'],EUnBinned)
                    config['energy']['emax'] = emaxtotal
                    config['analysis']['likelihood'] = "unbinned"
                    config['analysis']['ComputeDiffrsp'] = "yes"

                Analyse = Analysis(folder, config, \
                    configgeneric=config,\
                    tag=tag,\
                    verbose=verbose)
                ListOfAnalysisObjects.append(Analyse)

                mes.info('Creating Likelihood object for component.')
                Fit_component = Analyse.CreateLikeObject()
                mes.info('Adding component to the summed likelihood.')
                Fit.addComponent(Fit_component)
            FitRunner = Analyse
            FitRunner.obs.Emin = emintotal
            FitRunner.obs.Emax = emaxtotal
            # Restore the full energy range for the next component.
            config["energy"]["emin"] = emintotal
            config["energy"]["emax"] = emaxtotal

        else:
            Analyse = Analysis(folder, config, \
                configgeneric=config,\
                tag=typeirfs[evt], verbose = verbose)
            
            ListOfAnalysisObjects.append(Analyse)
            if not(xmlfile ==""): Analyse.obs.xmlfile = xmlfile
            mes.info('Creating Likelihood object for component.')
            Fit_component = Analyse.CreateLikeObject()
            mes.info('Adding component to the summed likelihood.')
            Fit.addComponent(Fit_component)
   
    FitRunner = Analyse
    config["event"]["evtype"] = evtold
    FitRunner.config = config

    return FitRunner,Fit,ListOfAnalysisObjects
Example #13
0
class AnalysisManager(Configurable):
    """Manager for a joint (summed-likelihood) analysis of a single
    catalog source: one BinnedGtlike component is built per entry of the
    'joint' configuration list and added to a SummedLikelihood."""

    # Default configuration; user values are merged in via configure()
    # in __init__.  Additional selection keys come from SelectorTask
    # defaults pulled into the 'select' group.
    default_config = {
        'convtype': -1,       # passed to SpectralAnalysis as conv_type
        'binsperdec': 4,      # energy bins per decade (SpectralAnalysis)
        'savedir': None,      # output directory; defaults to target name
        'scratchdir': None,
        'target': None,       # catalog source name to analyze
        'evfile': None,       # input FT1 event file
        'scfile': None,       # spacecraft (FT2) file
        'ltcube': None,       # livetime cube file or glob pattern
        'galdiff': None,      # galactic diffuse model file
        'isodiff': None,      # isotropic diffuse spectrum file
        'event_types': None,
        'gtbin': None,
        'catalog': '2FGL',    # catalog used to build the ROI model
        'optimizer': 'MINUIT',
        'joint': None,        # per-component dicts: evclass/evtype/irfs/isodiff
        'irfs': None          # IRF name (SpectralAnalysis)
    }

    def __init__(self, config=None, **kwargs):
        """Merge *config* and keyword overrides into the defaults and
        create the empty SummedLikelihood that components are added to."""
        super(AnalysisManager, self).__init__()

        # Pull selection-related defaults from SelectorTask into the
        # 'select' configuration group before applying user settings.
        self.update_default_config(SelectorTask, group='select')
        self.configure(config, **kwargs)

        # Debug aid: dump the fully-resolved configuration.
        from pprint import pprint
        pprint(self.config)

        self._like = SummedLikelihood()

    @property
    def like(self):
        """The composite SummedLikelihood built from all components."""
        return self._like

    @property
    def logLike(self):
        """The logLike object of the underlying summed likelihood."""
        return self._like.logLike

    def setup_roi(self, **kwargs):
        """Prepare the region-of-interest analysis for the configured target.

        Resolves the target from the 2FGL catalog, lays out output file
        paths under 'savedir', builds or locates the livetime cube, runs
        the event selection, writes the ROI source model, and constructs
        one BinnedGtlike component per entry in config['joint'], adding
        each to the summed likelihood.
        """

        target_name = self.config['target']

        # Look the target up in the 2FGL catalog.
        cat = Catalog.get('2fgl')
        self.src = CatalogSource(cat.get_source_by_name(target_name))

        # Default the output directory to the target name and create it.
        if self.config['savedir'] is None:
            self.set_config('savedir', target_name)

        if not os.path.exists(self.config['savedir']):
            os.makedirs(self.config['savedir'])

        config = self.config

        # Output file locations, all keyed on the target name.
        self.savestate = os.path.join(config['savedir'],
                                      "%s_savestate.P" % target_name)

        self.ft1file = os.path.join(config['savedir'],
                                    "%s_ft1.fits" % target_name)

        self.binfile = os.path.join(config['savedir'],
                                    "%s_binfile.fits" % target_name)
        self.srcmdl = os.path.join(config['savedir'],
                                   "%s_srcmdl.xml" % target_name)

        self.srcmdl_fit = os.path.join(config['savedir'],
                                       "%s_srcmdl_fit.xml" % target_name)


        # Use config['ltcube'] directly when it is an existing FITS file;
        # otherwise treat it as a glob pattern and sum the matching cubes
        # into a single livetime cube with LTSumTask.
        if os.path.isfile(config['ltcube']) and \
                re.search('\.fits?',config['ltcube']):
            self.ltcube = config['ltcube']
        else:
            ltcube = sorted(glob.glob(config['ltcube']))

            self.ltcube = os.path.join(config['savedir'],
                                       "%s_ltcube.fits" % target_name)

            lt_task = LTSumTask(self.ltcube, infile1=ltcube, config=config)

            lt_task.run()

        self.evfile = config['evfile']  #sorted(glob.glob(config['evfile']))
        #        if len(self.evfile) > 1:
        #            evfile_list = os.path.join(self.config('savedir'),'evfile.txt')
        #            np.savetxt(evfile_list,self.evfile,fmt='%s')
        #            self.evfile = os.path.abspath(evfile_list)
        #        else:
        #            self.evfile = self.evfile[0]

        #        if len(self.ltfile) > 1:
        #            ltfile_list = os.path.join(self.config('savedir'),'ltfile.txt')
        #            np.savetxt(ltfile_list,self.ltfile,fmt='%s')
        #            self.ltfile = os.path.abspath(ltfile_list)
        #        else:
        #            self.ltfile = self.ltfile[0]

        #        print self.evfile
        #        print self.ltfile

        self.skydir = SkyDir(self.src.ra, self.src.dec)

        # Event selection centered on the target position.
        sel_task = SelectorTask(self.evfile,
                                self.ft1file,
                                ra=self.src.ra,
                                dec=self.src.dec,
                                config=config['select'],
                                overwrite=False)
        sel_task.run()

        # Write the ROI XML model (catalog sources + diffuse components).
        cat.create_roi(self.src.ra,
                       self.src.dec,
                       config['isodiff'],
                       config['galdiff'],
                       self.srcmdl,
                       radius=5.0)

        #        self.setup_pointlike()

        self.components = []

        # One BinnedGtlike component per event-type selection in 'joint';
        # each component is added to the summed likelihood.
        for i, t in enumerate(self.config['joint']):

            print 'Setting up binned analysis ', i

            #            kw = dict(irfs=None,isodiff=None)
            #            kw.update(t)

            analysis = BinnedGtlike(
                self.src,
                target_name + '_%02i' % (i),
                config,
                evfile=self.ft1file,
                srcmdl=self.srcmdl,
                gtselect=dict(evclass=t['evclass'], evtype=t['evtype']),
                #                                    convtype=t['convtype'],
                irfs=t['irfs'],
                isodiff=t['isodiff'])

            analysis.setup_inputs()
            analysis.setup_gtlike()

            self.components.append(analysis)
            self._like.addComponent(analysis.like)

#        for i, p in self.tied_pars.iteritems():
#            print 'Tying parameters ', i, p
#            self.comp_like.tieParameters(p)

        # Use the energy binning of the first component for the summed object.
        self._like.energies = self.components[0].like.energies

        # NOTE(review): everything below this early return is unreachable
        # (parameter-tying code, apparently disabled intentionally).
        return

        for i, p in enumerate(self.components[0].like.params()):

            print i, p.srcName, p.getName()

            tied_params = []
            for c in self.components:
                tied_params.append([c.like, p.srcName, p.getName()])
            self.comp_like.tieParameters(tied_params)

#        self.tied_pars = {}
#        for x in self.components:

#            for s in x.like.sourceNames():
#                p = x.like.normPar(s)
#                pidx = x.like.par_index(s,p.getName())

#                if not pidx in self.tied_pars:
#                    self.tied_pars[pidx] = []

#                self.tied_pars[pidx].append([x.like,s,p.getName()])

#                print s, p.getName()
#                self.norm_pars.append([x.like,s,p.getName()])
#            self.norm_pars.append([self.analysis1.like,src,p.getName()])

    def fit(self):
        """Fit the summed likelihood model and print a summary dict for
        the target source."""

        # NOTE(review): saved_state snapshots the parameter state but is
        # never restored -- presumably intended as a rollback on a failed
        # fit; confirm before removing.
        saved_state = LikelihoodState(self.like)

        print 'Fitting model'
        self.like.fit(verbosity=2, covar=True)

        source_dict = gtlike_source_dict(self.like, self.src.name)

        import pprint
        pprint.pprint(source_dict)

    def write_xml_model(self):
        """Write the XML model file of every analysis component."""
        for component in self.components:
            component.write_model()

    def make_source_model(self):
        """Generate the source model of every analysis component."""
        for component in self.components:
            component.make_srcmodel()

#    def gtlike_results(self, **kwargs):
#        from lande.fermi.likelihood.save import source_dict
#        return source_dict(self.like, self.name, **kwargs)

#    def gtlike_summary(self):
#        from lande.fermi.likelihood.printing import gtlike_summary
#        return gtlike_summary(self.like,maxdist=self.config['radius'])

    def free_source(self, name, free=True):
        """Free or fix a source's parameters in the ROI.

        name : source name (string or pointlike source object)
        free : if True, release the normalization parameter only;
               if False, fix all currently-free parameters of the source.
        """
        like = self.like
        free_pars = like.freePars(name)
        norm_name = like.normPar(name).getName()
        norm_idx = like.par_index(name, norm_name)
        if free:
            like[norm_idx].setFree(True)
        else:
            like.setFreeFlag(name, free_pars, False)
        like.syncSrcParams(name)

    def save(self):
        """Persist this object to self.savestate (set in setup_roi) via
        the project-local save_object helper."""
        from util import save_object
        save_object(self, self.savestate)

    def setup_pointlike(self):
        """Build the pointlike ROI and write the XML source model file.

        Skipped entirely when the model file (self.srcmdl) already exists.

        NOTE(review): config keys 'emin', 'emax', 'radius', 'zmax' are not
        in default_config -- presumably injected by the SelectorTask
        defaults merged in __init__; confirm against Configurable.
        """

        if os.path.isfile(self.srcmdl): return

        config = self.config

        self._ds = DataSpecification(ft1files=self.ft1file,
                                     ft2files=config['scfile'],
                                     ltcube=self.ltcube,
                                     binfile=self.binfile)

        self._sa = SpectralAnalysis(self._ds,
                                    binsperdec=config['binsperdec'],
                                    emin=config['emin'],
                                    emax=config['emax'],
                                    irf=config['irfs'],
                                    roi_dir=self.skydir,
                                    maxROI=config['radius'],
                                    minROI=config['radius'],
                                    zenithcut=config['zmax'],
                                    event_class=0,
                                    conv_type=config['convtype'])

        sources = []
        point_sources, diffuse_sources = [], []

        galdiff = config['galdiff']
        isodiff = config['isodiff']

        # Diffuse backgrounds (galactic + isotropic); None entries dropped.
        bkg_sources = self.get_background(galdiff, isodiff)
        sources += filter(None, bkg_sources)

        catalog = self.get_catalog(config['catalog'])
        catalogs = filter(None, [catalog])

        # Split the backgrounds into point-like and diffuse sources for
        # the ROI builder.
        for source in sources:
            if isinstance(source, PointSource): point_sources.append(source)
            else: diffuse_sources.append(source)

        self._roi = self._sa.roi(roi_dir=self.skydir,
                                 point_sources=point_sources,
                                 diffuse_sources=diffuse_sources,
                                 catalogs=catalogs,
                                 fit_emin=config['emin'],
                                 fit_emax=config['emax'])

        # Create model file
        self._roi.toXML(self.srcmdl,
                        convert_extended=True,
                        expand_env_vars=True)

    @staticmethod
    def get_catalog(catalog=None, **kwargs):
        """Resolve *catalog* to a catalog object.

        Accepts None or an already-constructed SourceCatalog (returned
        unchanged), or one of the names 'PSC3Y', '2FGL', '1FGL'.
        Raises Exception for any other value.
        """
        if catalog is None or isinstance(catalog, SourceCatalog):
            return catalog

        if catalog == 'PSC3Y':
            return Catalog3Y(
                '/u/ki/kadrlica/fermi/catalogs/PSC3Y/gll_psc3yearclean_v1_assoc_v6r1p0.fit',
                latextdir='/u/ki/kadrlica/fermi/catalogs/PSC3Y/',
                prune_radius=0,
                **kwargs)

        if catalog == '2FGL':
            return Catalog2FGL(
                '/u/ki/kadrlica/fermi/catalogs/2FGL/gll_psc_v08.fit',
                latextdir='/u/ki/kadrlica/fermi/catalogs/2FGL/Templates/',
                prune_radius=0,
                **kwargs)

        if catalog == "1FGL":
            return FermiCatalog(
                '/u/ki/kadrlica/fermi/catalogs/gll_psc_v02.fit',
                prune_radius=0,
                **kwargs)

        raise Exception("Unknown catalog: %s" % catalog)

    @staticmethod
    def get_background(galdiff=None, isodiff=None, limbdiff=None):
        """Construct the diffuse background sources.

        galdiff  : Galactic diffuse counts cube FITS file
        isodiff  : Isotropic diffuse spectral text file
        limbdiff : Limb diffuse counts map FITS file

        Returns the list [gal, iso, limb]; the entry for any input that
        is None is None.
        """
        backgrounds = []

        if galdiff is None: gal = None
        else:
            gfile = os.path.basename(galdiff)
            gal = get_diffuse_source('MapCubeFunction', galdiff, 'PowerLaw',
                                     None,
                                     os.path.splitext(gfile)[0])
            gal.smodel.set_default_limits()
            # Keep the galactic diffuse spectral index fixed.
            gal.smodel.freeze('index')
        backgrounds.append(gal)

        if isodiff is None: iso = None
        else:
            ifile = os.path.basename(isodiff)
            iso = get_diffuse_source('ConstantValue', None, 'FileFunction',
                                     isodiff,
                                     os.path.splitext(ifile)[0])
            iso.smodel.set_default_limits()
        backgrounds.append(iso)

        if limbdiff is None: limb = None
        else:
            # BUGFIX: use os.path.basename (bare 'basename' was undefined)
            # and derive the source name from the file name, matching the
            # gal/iso handling (the original referenced an undefined
            # 'name' variable).
            lfile = os.path.basename(limbdiff)
            dmodel = SpatialMap(limbdiff)
            smodel = PLSuperExpCutoff(norm=3.16e-6,
                                      index=0,
                                      cutoff=20.34,
                                      b=1,
                                      e0=200)
            limb = ExtendedSource(name=os.path.splitext(lfile)[0],
                                  model=smodel,
                                  spatial_model=dmodel)
            # Freeze every spectral parameter of the limb component.
            for i in range(limb.smodel.npar):
                limb.smodel.freeze(i)
        # BUGFIX: append exactly once, like gal and iso above (previously
        # limb was appended twice whenever limbdiff was set).
        backgrounds.append(limb)

        return backgrounds