Example #1
0
def likelihood_upper_limit3(z):
    """Scan energy bins and compute a one-sided upper limit on the flux of a
    spectral "box" feature whose upper edge sits at each bin energy.

    For each candidate box edge, the counts in a sliding window around the
    edge are excised, the background model is refit on the two sidebands
    (below and above the window) with a SummedLikelihood, the window is then
    refilled with Poisson fluctuations of the best-fit model, and the box
    normalization is ratcheted upward (coarse-to-fine: x3, x1.5, x1.03) until
    2*(logL(flux) - logL(null)) exceeds 2.706 (the one-sided 90% chi2
    critical value for one parameter).

    Parameters:
        z: kinematic parameter in [0, 1] setting the box width
           E * 2*sqrt(1-z)/(1+sqrt(1-z)); z=0 gives the widest box.

    Returns:
        numpy array of per-bin upper-limit fluxes (zero for bins outside the
        scanned range 6..47).

    NOTE(review): depends on module-level helpers and input files not shown
    here (edit_box_xml, loglikelihood3, make_random, plot_spectrum, the FITS
    inputs); the behavior above is inferred from this code alone -- confirm
    against those helpers.
    """

    #Array to hold results of flux upper limit calculation
    num_ebins = 51  #1 more than the number of bins due to the fencepost problem
    # Logarithmically spaced bin edges from 6 GeV to 800 GeV (values in MeV).
    energies = 10**np.linspace(np.log10(6000), np.log10(800000), num_ebins)
    ebin_widths = np.diff(energies)

    sourcemap = '6gev_srcmap_03_pedestal.fits'  #'box_srcmap_artificial_box.fits'#'6gev_srcmap_complete.fits'
    box_flux = np.zeros((num_ebins - 1))
    box_flux_bayesian = np.zeros((num_ebins - 1))
    box_flux_frequentist = np.zeros((num_ebins - 1))

    # Correlation coefficients (box norm vs disk prefactor / disk index);
    # only filled when calc_cov is enabled below.
    corr = np.zeros((num_ebins - 1))
    corr2 = np.zeros((num_ebins - 1))

    gll_index = np.zeros((num_ebins - 1))
    disk_index = np.zeros((num_ebins - 1))
    #reconstructed_spectra = np.zeros((num_ebins-1, num_ebins-1))
    #Loop through upper edge of box
    for index in range(6, 48):
        # Box width for this edge energy (see docstring for the z dependence).
        box_width = energies[index] * 2.0 * np.sqrt(1.0 -
                                                    z) / (1 + np.sqrt(1.0 - z))

        print "Calculating upper limit in bin " + str(
            index) + " at energy " + str(energies[index])
        #print "bin " + str(np.argmin(np.abs(energies-energy)))
        #window_low, window_high = window(energy, energies)

        # Analysis window: 6 bins below and 2 bins above the box edge are
        # excluded from the sideband fit (so index 6 has no low sideband and
        # indices >= 48 would have no high sideband).
        window_low = index - 6
        window_high = index + 2

        #Generate two observations (one above the window and one below)
        #Make two exposure maps
        if index > 6:
            # Low-side exposure map: keep only energy planes below the window.
            # NOTE(review): assumes HDU 0 is the exposure cube and HDU 1 its
            # energy table -- confirm against 6gev_exposure.fits.
            exposure_complete = pyfits.open('6gev_exposure.fits')
            exposure_complete[0].data = exposure_complete[0].data[:window_low +
                                                                  1]
            a = exposure_complete[0]
            exposure_complete[1].data = exposure_complete[1].data[:window_low +
                                                                  1]
            b = exposure_complete[1]
            hdulist = pyfits.HDUList([a, b, exposure_complete[2]])
            # Delete any stale output first (writeto presumably does not
            # overwrite by default).
            os.system('rm exposure_low.fits')
            hdulist.writeto('exposure_low.fits')
            exposure_complete.close()
        if index < 48:
            # High-side exposure map: keep only energy planes above the window.
            exposure_complete = pyfits.open('6gev_exposure.fits')
            exposure_complete[0].data = exposure_complete[0].data[window_high:]
            a = exposure_complete[0]
            exposure_complete[1].data = exposure_complete[1].data[window_high:]
            b = exposure_complete[1]
            hdulist = pyfits.HDUList([a, b, exposure_complete[2]])
            os.system('rm exposure_high.fits')
            hdulist.writeto('exposure_high.fits')
            exposure_complete.close()

        #Make two sourcemaps
        if index > 6:
            # Low-side source map: truncate the counts cube (HDU 0), the
            # energy/EBOUNDS table (HDU 2, presumably), and each per-source
            # model map (HDUs 3-9). Map HDUs keep one extra plane (bin edges).
            srcmap_complete = pyfits.open(sourcemap)
            srcmap_complete[0].data = srcmap_complete[0].data[:window_low]
            a = srcmap_complete[0]
            srcmap_complete[2].data = srcmap_complete[2].data[:window_low]
            b = srcmap_complete[2]
            srcmap_complete[3].data = srcmap_complete[3].data[:window_low + 1]
            c = srcmap_complete[3]
            srcmap_complete[4].data = srcmap_complete[4].data[:window_low + 1]
            d = srcmap_complete[4]
            srcmap_complete[5].data = srcmap_complete[5].data[:window_low + 1]
            e = srcmap_complete[5]
            srcmap_complete[6].data = srcmap_complete[6].data[:window_low + 1]
            f = srcmap_complete[6]
            srcmap_complete[7].data = srcmap_complete[7].data[:window_low + 1]
            g = srcmap_complete[7]
            srcmap_complete[8].data = srcmap_complete[8].data[:window_low + 1]
            h = srcmap_complete[8]
            srcmap_complete[9].data = srcmap_complete[9].data[:window_low + 1]
            m = srcmap_complete[9]

            os.system('rm srcmap_low.fits')
            # NOTE(review): str() + ':' + str() evaluates to just ':' -- this
            # blanks the DSVAL4 energy-range keyword instead of writing real
            # limits; looks like the intended bounds were lost. Confirm.
            b.header['DSVAL4'] = str() + ':' + str()
            hdulist = pyfits.HDUList(
                [a, srcmap_complete[1], b, c, d, e, f, g, h, m])
            hdulist.writeto('srcmap_low.fits')
            srcmap_complete.close()

        if index < 48:
            # High-side source map: same surgery, keeping planes above the
            # window.
            srcmap_complete = pyfits.open(sourcemap)
            srcmap_complete[0].data = srcmap_complete[0].data[window_high:]
            a = srcmap_complete[0]
            srcmap_complete[2].data = srcmap_complete[2].data[window_high:]
            r = 0
            # Renumber the first (channel) column of the truncated table so it
            # starts at 0 again; the low-side cut keeps the leading channels
            # and so needs no renumbering.
            for entry in srcmap_complete[2].data:
                entry[0] = int(r)
                r += 1
            #srcmap_complete[2].data[:,0] = np.arange(0, len(srcmap_complete[2].data[:,0]))
            b = srcmap_complete[2]
            srcmap_complete[3].data = srcmap_complete[3].data[window_high:]
            c = srcmap_complete[3]
            srcmap_complete[4].data = srcmap_complete[4].data[window_high:]
            d = srcmap_complete[4]
            srcmap_complete[5].data = srcmap_complete[5].data[window_high:]
            e = srcmap_complete[5]
            srcmap_complete[6].data = srcmap_complete[6].data[window_high:]
            f = srcmap_complete[6]
            srcmap_complete[7].data = srcmap_complete[7].data[window_high:]
            g = srcmap_complete[7]
            srcmap_complete[8].data = srcmap_complete[8].data[window_high:]
            h = srcmap_complete[8]
            srcmap_complete[9].data = srcmap_complete[9].data[window_high:]
            m = srcmap_complete[9]

            os.system('rm srcmap_high.fits')
            hdulist = pyfits.HDUList(
                [a, srcmap_complete[1], b, c, d, e, f, g, h, m])
            hdulist.writeto('srcmap_high.fits')
            srcmap_complete.close()

        # Joint fit of the background model to the two sidebands.
        summedLike = SummedLikelihood()

        if index > 6:
            obs_low = BinnedObs(srcMaps='srcmap_low.fits',
                                expCube='6gev_ltcube.fits',
                                binnedExpMap='exposure_low.fits',
                                irfs='CALDB')
            like_low = BinnedAnalysis(obs_low,
                                      'xmlmodel_free.xml',
                                      optimizer='NEWMINUIT')
            summedLike.addComponent(like_low)

        if index < 48:
            obs_high = BinnedObs(srcMaps='srcmap_high.fits',
                                 expCube='6gev_ltcube.fits',
                                 binnedExpMap='exposure_high.fits',
                                 irfs='CALDB')
            like_high = BinnedAnalysis(obs_high,
                                       'xmlmodel_free.xml',
                                       optimizer='NEWMINUIT')
            summedLike.addComponent(like_high)

        print "Fitting SummedLikelihood"
        summedLike.ftol = 1e-8
        summedLike.fit(verbosity=3)
        summedLike.writeXml('xmlmodel_free.xml')
        # Freeze every parameter at its sideband best-fit value; the frozen
        # model is used for the full-range fits below.
        for k in range(len(summedLike.params())):
            summedLike.freeze(k)
        summedLike.writeXml('xmlmodel_fixed.xml')

        print "Fitting all data"

        calculation = 'poisson'
        obs_complete = BinnedObs(srcMaps=sourcemap,
                                 expCube='6gev_ltcube.fits',
                                 binnedExpMap='6gev_exposure.fits',
                                 irfs='CALDB')
        # Inject a (tiny-normalization) box component into the frozen model.
        edit_box_xml(100000.0, 1e-15, 0.0)
        like = BinnedAnalysis(obs_complete,
                              'xmlmodel_fixed_box.xml',
                              optimizer='MINUIT')
        like.tol = 1e-8
        like_obj = pyLike.Minuit(like.logLike)
        like.fit(verbosity=3, optObject=like_obj)
        like.writeXml('xmlmodel_fixed_box.xml')

        #Flucuate the window data
        # NOTE(review): 'f' here rebinds a name used above for a source-map
        # HDU, and 'bin' shadows the builtin -- harmless but confusing.
        f = pyfits.open(sourcemap)
        # One 50x50 counts plane per energy bin inside the window; spatial
        # shape assumed 50x50 -- TODO confirm against the source map.
        poisson_data = np.zeros(
            (len(range(max(window_low, 0), min(window_high, 49))), 50, 50))
        q = 0
        for bin in range(max(window_low, 0), min(window_high, 49)):
            for source in like.sourceNames():
                # Find the model-map HDU whose EXTNAME matches this source.
                # NOTE(review): model_counts/num_photons are recomputed on
                # every j iteration, and if no EXTNAME matches, the_index
                # silently keeps its value from the previous source -- verify
                # every source has a matching HDU in 3..9.
                for j in range(3, 10):
                    if source == f[j].header['EXTNAME']:
                        the_index = j
                    model_counts = np.zeros(
                        (1, len(f[the_index].data[bin].ravel())))[0]
                    num_photons = int(
                        np.round(np.random.poisson(
                            like._srcCnts(source)[bin])))  #
                # Distribute the Poisson-drawn photons over pixels, weighted
                # by the source's model map (make_random presumably samples a
                # pixel index from that weighting -- confirm).
                for photon in range(int(num_photons)):
                    phot_loc = int(
                        make_random(np.arange(0, len(model_counts), 1),
                                    f[the_index].data[bin].ravel()))
                    model_counts[phot_loc] += 1
                model_counts = model_counts.reshape(50, 50)
                poisson_data[q] += model_counts
            q += 1
        # Replace the window planes of the counts cube with the fluctuated
        # model counts and write the pseudo-data source map.
        f[0].data[max(window_low, 0):min(window_high, 49)] = poisson_data
        os.system('rm box_srcmap_poisson.fits')
        f.writeto('box_srcmap_poisson.fits')
        f.close()

        obs_poisson = BinnedObs(srcMaps='box_srcmap_poisson.fits',
                                expCube='6gev_ltcube.fits',
                                binnedExpMap='6gev_exposure.fits',
                                irfs='CALDB')
        like = BinnedAnalysis(obs_poisson,
                              'xmlmodel_fixed_box.xml',
                              optimizer='NEWMINUIT')
        like.tol = 1e-8
        like_obj = pyLike.Minuit(like.logLike)
        like.fit(verbosity=0, optObject=like_obj)

        # Choose which observation drives the limit: real data or pseudo-data.
        if calculation == 'complete':
            obs_calculation = obs_complete
        else:
            obs_calculation = obs_poisson

        print "Finding Upper Limit..."
        # Null hypothesis: effectively zero box flux.
        null_likelihood = loglikelihood3(energies[index], 1.0e-15,
                                         obs_calculation, z)
        delta_loglike = 0.0

        #increase box flux until likelihood > 2sigma over null likelihood
        box_flux[index] = 3.e-15
        crit_chi2 = 2.706
        # Coarse scan: multiply the flux by 3 until the threshold is crossed.
        while delta_loglike < crit_chi2:
            print "delta loglike = " + str(delta_loglike)
            print "flux = " + str(box_flux[index]) + " likelihood = " + str(
                loglikelihood3(energies[index], box_flux[index],
                               obs_calculation, z))
            box_flux[index] *= 3.0
            delta_loglike = 2.0 * (loglikelihood3(
                energies[index], box_flux[index], obs_calculation, z) -
                                   null_likelihood)

        # Step back one coarse step and refine with factor-1.5 steps.
        box_flux[index] *= 1.0 / 3.0
        delta_loglike = 2.0 * (loglikelihood3(energies[index], box_flux[index],
                                              obs_calculation, z) -
                               null_likelihood)
        print "Delta loglike = " + str(delta_loglike)
        while delta_loglike < crit_chi2:
            print "delta loglike = " + str(delta_loglike)
            print "flux = " + str(box_flux[index]) + " likelihood = " + str(
                loglikelihood3(energies[index], box_flux[index],
                               obs_calculation, z))
            box_flux[index] *= 1.5
            delta_loglike = 2.0 * (loglikelihood3(
                energies[index], box_flux[index], obs_calculation, z) -
                                   null_likelihood)

        # Step back again and refine with fine factor-1.03 steps.
        box_flux[index] *= 1.0 / 1.5
        delta_loglike = 2.0 * (loglikelihood3(energies[index], box_flux[index],
                                              obs_calculation, z) -
                               null_likelihood)

        while delta_loglike < crit_chi2:
            print "delta loglike = " + str(delta_loglike)
            print "flux = " + str(box_flux[index]) + " likelihood = " + str(
                loglikelihood3(energies[index], box_flux[index],
                               obs_calculation, z))
            box_flux[index] *= 1.03
            delta_loglike = 2.0 * (loglikelihood3(
                energies[index], box_flux[index], obs_calculation, z) -
                                   null_likelihood)

        # Final answer: last flux below the threshold.
        box_flux[index] *= 1.0 / 1.03
        delta_loglike = 2.0 * (loglikelihood3(energies[index], box_flux[index],
                                              obs_calculation, z) -
                               null_likelihood)

        print "delta log like = " + str(delta_loglike)

        # Optional covariance/correlation study between the box normalization
        # and the disk component parameters (disabled by default).
        calc_cov = False
        if calc_cov:
            like1 = BinnedAnalysis(obs_calculation,
                                   'xmlmodel_fixed_box.xml',
                                   optimizer='DRMNFB')

            like1.thaw(like1.par_index('Disk Component', 'Index'))
            like1.thaw(like1.par_index('Disk Component', 'Prefactor'))
            like1.thaw(like1.par_index('Box Component', 'Normalization'))
            like1.tol = 1e-5
            like1obj = pyLike.Minuit(like1.logLike)
            like1.fit(verbosity=0, optObject=like1obj, covar=False)

            like1.writeXml('xmlmodel_fixed_box.xml')

            like2 = BinnedAnalysis(obs_calculation,
                                   'xmlmodel_fixed_box.xml',
                                   optimizer='NewMinuit')
            like2.tol = 1e-8
            # NOTE(review): like2obj wraps like1.logLike and the fit below
            # passes like1obj -- both look like copy/paste slips; they should
            # presumably reference like2. The getRetCode() printed below
            # therefore reports the wrong optimizer's status. Confirm.
            like2obj = pyLike.Minuit(like1.logLike)
            like2.fit(verbosity=3, optObject=like1obj, covar=True)

            #ul = UpperLimit(like1,'Box Component')
            #ul.compute(emin=100.0,emax=500000, delta=3.91)

            #box_flux_bayesian[index] = float(ul.bayesianUL()[0])
            #box_flux_frequentist[index] = float(ul.results[0].value)
            print like2.covariance
            print 'Return code: ' + str(like2obj.getRetCode())
            cov = like2.covariance
            # Correlation coefficients from the 3x3 covariance of the thawed
            # parameters (order assumed: box norm, disk prefactor, disk index
            # -- TODO confirm parameter ordering).
            corr[index] = cov[0][1] / np.sqrt(cov[0][0] * cov[1][1])
            corr2[index] = cov[0][2] / np.sqrt(cov[0][0] * cov[2][2])
            print "Correlations:"
            print corr[index]
            print corr2[index]
            #if like2obj.getRetCode()!=0:
            plot_spectrum(like2, energies, index, window_low, window_high)

    # NOTE(review): calc_cov is assigned inside the loop body above; this
    # relies on the leaked loop-local value (always False as written).
    if calc_cov:
        # NOTE(review): 'file' shadows the builtin.
        file = open('correlation_results.pk1', 'wb')
        pickle.dump([corr, corr2, box_flux_bayesian, box_flux_frequentist],
                    file)
        file.close()

        fig = plt.figure()
        ax = fig.add_subplot(111)
        ax.plot(energies[np.nonzero(corr)],
                corr[np.nonzero(corr)],
                color='blue',
                label='Box vs GC Prefactor')
        ax.plot(energies[np.nonzero(corr2)],
                corr2[np.nonzero(corr2)],
                color='red',
                label='Box vs GC index')
        plt.ylim([-1.0, 1.0])

        plt.legend()
        plt.xscale('log')

        plt.show()

    return box_flux
Example #2
0
class AnalysisManager(Configurable):

    default_config = { 'convtype'   : -1,
                       'binsperdec' : 4,
                       'savedir'    : None,
                       'scratchdir' : None,
                       'target'     : None,
                       'evfile'     : None,
                       'scfile'     : None,
                       'ltcube'     : None,
                       'galdiff'    : None,
                       'isodiff'    : None,
                       'event_types': None,
                       'gtbin'      : None,
                       'catalog'    : '2FGL',
                       'optimizer'  : 'MINUIT',
                       'joint'      : None,
                       'irfs'       : None }
    
    def __init__(self,config=None,**kwargs):
        super(AnalysisManager,self).__init__()
        self.update_default_config(SelectorTask,group='select')
        
        self.configure(config,**kwargs)

        import pprint

        pprint.pprint(self.config)

        self._like = SummedLikelihood()
        
    
    @property
    def like(self):
        return self._like

    @property
    def logLike(self):
        return self._like.logLike
        
    def setup_roi(self,**kwargs):

        target_name = self.config['target']
        
        cat = Catalog.get('2fgl')
        self.src = CatalogSource(cat.get_source_by_name(target_name))

        
        if self.config['savedir'] is None:
            self.set_config('savedir',target_name)

        if not os.path.exists(self.config['savedir']):
            os.makedirs(self.config['savedir'])
        
        config = self.config

        self.savestate = os.path.join(config['savedir'],
                                    "%s_savestate.P"%target_name)
        
        self.ft1file = os.path.join(config['savedir'],
                                    "%s_ft1.fits"%target_name)

        
            
        self.binfile = os.path.join(config['savedir'],
                                    "%s_binfile.fits"%target_name)
        self.srcmdl = os.path.join(config['savedir'],
                                   "%s_srcmdl.xml"%target_name)
        
        self.srcmdl_fit = os.path.join(config['savedir'],
                                       "%s_srcmdl_fit.xml"%target_name)
        

        if os.path.isfile(config['ltcube']) and \
                re.search('\.fits?',config['ltcube']):
            self.ltcube = config['ltcube']
        else:
            ltcube = sorted(glob.glob(config['ltcube']))

            
            self.ltcube = os.path.join(config['savedir'],
                                       "%s_ltcube.fits"%target_name)

            lt_task = LTSumTask(self.ltcube,infile1=ltcube,
                                config=config)

            lt_task.run()

        
        self.evfile = config['evfile']#sorted(glob.glob(config['evfile']))
#        if len(self.evfile) > 1:
#            evfile_list = os.path.join(self.config('savedir'),'evfile.txt')
#            np.savetxt(evfile_list,self.evfile,fmt='%s')
#            self.evfile = os.path.abspath(evfile_list)
#        else:
#            self.evfile = self.evfile[0]
            
#        if len(self.ltfile) > 1:
#            ltfile_list = os.path.join(self.config('savedir'),'ltfile.txt')
#            np.savetxt(ltfile_list,self.ltfile,fmt='%s')
#            self.ltfile = os.path.abspath(ltfile_list)
#        else:
#            self.ltfile = self.ltfile[0]
            
#        print self.evfile
#        print self.ltfile
        
        self.skydir = SkyDir(self.src.ra,self.src.dec)

        sel_task = SelectorTask(self.evfile,self.ft1file,
                                ra=self.src.ra,dec=self.src.dec,
                                config=config['select'],overwrite=False)
        sel_task.run()

        cat.create_roi(self.src.ra,self.src.dec,
                       config['isodiff'],
                       config['galdiff'],                       
                       self.srcmdl,radius=5.0)
        
#        self.setup_pointlike()

        self.components = []
                
        for i, t in enumerate(self.config['joint']):

            print 'Setting up binned analysis ', i

#            kw = dict(irfs=None,isodiff=None)
#            kw.update(t)
            
            analysis = BinnedGtlike(self.src,
                                    target_name + '_%02i'%(i),
                                    config,
                                    evfile=self.ft1file,
                                    srcmdl=self.srcmdl,
                                    gtselect=dict(evclass=t['evclass'],
                                                  evtype=t['evtype']),
#                                    convtype=t['convtype'],
                                    irfs=t['irfs'],
                                    isodiff=t['isodiff'])

            analysis.setup_inputs()
            analysis.setup_gtlike()
            
            self.components.append(analysis)
            self._like.addComponent(analysis.like)

#        for i, p in self.tied_pars.iteritems():
#            print 'Tying parameters ', i, p            
#            self.comp_like.tieParameters(p)

        self._like.energies = self.components[0].like.energies
            
        return
            
        for i, p in enumerate(self.components[0].like.params()):

            print i, p.srcName, p.getName()

            tied_params = []            
            for c in self.components:
                tied_params.append([c.like,p.srcName,p.getName()])
            self.comp_like.tieParameters(tied_params)
                
#        self.tied_pars = {}
#        for x in self.components:
        
#            for s in x.like.sourceNames():
#                p = x.like.normPar(s)                
#                pidx = x.like.par_index(s,p.getName())

#                if not pidx in self.tied_pars:
#                    self.tied_pars[pidx] = []

#                self.tied_pars[pidx].append([x.like,s,p.getName()])
                    
#                print s, p.getName()        
#                self.norm_pars.append([x.like,s,p.getName()])
#            self.norm_pars.append([self.analysis1.like,src,p.getName()])

    def fit(self):

        saved_state = LikelihoodState(self.like)
        
        print 'Fitting model'
        self.like.fit(verbosity=2, covar=True)

        source_dict = gtlike_source_dict(self.like,self.src.name) 

        import pprint
        pprint.pprint(source_dict)

    def write_xml_model(self):        
        
        for c in self.components:
            c.write_model()
#            c.make_srcmodel()

    def make_source_model(self):

        for c in self.components:
            c.make_srcmodel()
            
#    def gtlike_results(self, **kwargs):
#        from lande.fermi.likelihood.save import source_dict
#        return source_dict(self.like, self.name, **kwargs)

#    def gtlike_summary(self):
#        from lande.fermi.likelihood.printing import gtlike_summary
#        return gtlike_summary(self.like,maxdist=self.config['radius'])
        
    def free_source(self,name,free=True):
        """ Free a source in the ROI 
            source : string or pointlike source object
            free   : boolean to free or fix parameter
        """
        freePars = self.like.freePars(name)
        normPar = self.like.normPar(name).getName()
        idx = self.like.par_index(name, normPar)
        if not free:
            self.like.setFreeFlag(name, freePars, False)
        else:
            self.like[idx].setFree(True)
        self.like.syncSrcParams(name)
        
    def save(self):
        from util import save_object
        save_object(self,self.savestate)
            
    def setup_pointlike(self):

        if os.path.isfile(self.srcmdl): return
        
        config = self.config
        
        self._ds = DataSpecification(ft1files = self.ft1file,
                                     ft2files = config['scfile'],
                                     ltcube   = self.ltcube,
                                     binfile  = self.binfile)

        
        self._sa = SpectralAnalysis(self._ds,
                                    binsperdec = config['binsperdec'],
                                    emin       = config['emin'],
                                    emax       = config['emax'],
                                    irf        = config['irfs'],
                                    roi_dir    = self.skydir,
                                    maxROI     = config['radius'],
                                    minROI     = config['radius'],
                                    zenithcut  = config['zmax'],
                                    event_class= 0,
                                    conv_type  = config['convtype'])

        sources = []
        point_sources, diffuse_sources = [],[]

        galdiff = config['galdiff']        
        isodiff = config['isodiff']

        bkg_sources = self.get_background(galdiff,isodiff)
        sources += filter(None, bkg_sources)
        
        catalog = self.get_catalog(config['catalog'])
        catalogs = filter(None, [catalog])

        for source in sources:
            if isinstance(source,PointSource): point_sources.append(source)
            else: diffuse_sources.append(source)
        
        self._roi=self._sa.roi(roi_dir=self.skydir,
                               point_sources=point_sources,
                               diffuse_sources=diffuse_sources,
                               catalogs=catalogs,
                               fit_emin=config['emin'], 
                               fit_emax=config['emax'])

        # Create model file
        self._roi.toXML(self.srcmdl,
                        convert_extended=True,
                        expand_env_vars=True)
        
    @staticmethod
    def get_catalog(catalog=None, **kwargs):
        if catalog is None or isinstance(catalog,SourceCatalog):
            pass
        elif catalog == 'PSC3Y':
            catalog = Catalog3Y('/u/ki/kadrlica/fermi/catalogs/PSC3Y/gll_psc3yearclean_v1_assoc_v6r1p0.fit',
                                latextdir='/u/ki/kadrlica/fermi/catalogs/PSC3Y/',
                                prune_radius=0,
                                **kwargs)
        elif catalog == '2FGL':
            catalog = Catalog2FGL('/u/ki/kadrlica/fermi/catalogs/2FGL/gll_psc_v08.fit',
                               latextdir='/u/ki/kadrlica/fermi/catalogs/2FGL/Templates/',
                               prune_radius=0,
                               **kwargs)
        elif catalog == "1FGL":
            catalog = FermiCatalog('/u/ki/kadrlica/fermi/catalogs/gll_psc_v02.fit',
                               prune_radius=0,
                               **kwargs)
        else:
            raise Exception("Unknown catalog: %s"%catalog)

        return catalog

    @staticmethod
    def get_background(galdiff=None, isodiff=None, limbdiff=None):
        """ Diffuse backgrounds
        galdiff: Galactic diffuse counts cube fits file
        isodiff: Isotropic diffuse spectral text file
        limbdiff: Limb diffuse counts map fits file
        """
        backgrounds = []

        if galdiff is None: gal=None
        else:
            gfile = os.path.basename(galdiff)
            gal = get_diffuse_source('MapCubeFunction',galdiff,
                                     'PowerLaw',None,
                                     os.path.splitext(gfile)[0])
            gal.smodel.set_default_limits()
            gal.smodel.freeze('index')
        backgrounds.append(gal)

        if isodiff is None: iso=None
        else:
            ifile = os.path.basename(isodiff)
            iso = get_diffuse_source('ConstantValue',None,'FileFunction'
                                     ,isodiff,
                                     os.path.splitext(ifile)[0])
            iso.smodel.set_default_limits()
        backgrounds.append(iso)        

        if limbdiff is None: limb=None
        else:
            lfile = basename(limbdiff)
            dmodel = SpatialMap(limbdiff)
            smodel = PLSuperExpCutoff(norm=3.16e-6,index=0,
                                      cutoff=20.34,b=1,e0=200)
            limb = ExtendedSource(name=name,model=smodel,spatial_model=dmodel)
            for i in range(limb.smodel.npar): limb.smodel.freeze(i)
            backgrounds.append(limb)
        backgrounds.append(limb)

        return backgrounds
Example #3
0
class AnalysisManager(Configurable):
    """Drive a joint binned likelihood analysis of a Fermi-LAT target.

    Builds one BinnedGtlike component per entry in config['joint']
    (setup_roi) and sums them into a single SummedLikelihood that is
    then fit (fit).
    """

    # Default configuration; merged with user-supplied values in
    # __init__ via self.configure(config, **kwargs).
    default_config = {
        'convtype': -1,        # conversion type passed to pointlike (-1 presumably = all; confirm)
        'binsperdec': 4,       # energy bins per decade for pointlike binning
        'savedir': None,       # output directory; defaults to the target name in setup_roi
        'scratchdir': None,
        'target': None,        # catalog name of the target source
        'evfile': None,        # FT1 event file
        'scfile': None,        # FT2 spacecraft file
        'ltcube': None,        # livetime cube file, or a glob pattern to be summed
        'galdiff': None,       # Galactic diffuse counts cube fits file
        'isodiff': None,       # isotropic diffuse spectral text file
        'event_types': None,
        'gtbin': None,
        'catalog': '2FGL',     # catalog used to populate the ROI model
        'optimizer': 'MINUIT',
        'joint': None,         # list of dicts with evclass/evtype/irfs/isodiff per component
        'irfs': None
    }

    def __init__(self, config=None, **kwargs):
        """Initialize the manager and merge the user configuration.

        config : dict-like configuration (optional)
        kwargs : individual configuration overrides
        """
        super(AnalysisManager, self).__init__()

        # Pull the event-selection defaults in under the 'select' group.
        self.update_default_config(SelectorTask, group='select')
        self.configure(config, **kwargs)

        # Echo the effective configuration for the log.
        from pprint import pprint
        pprint(self.config)

        self._like = SummedLikelihood()

    @property
    def like(self):
        """The composite SummedLikelihood built in __init__."""
        return self._like

    @property
    def logLike(self):
        """The logLike object of the underlying summed likelihood."""
        return self._like.logLike

    def setup_roi(self, **kwargs):
        """Prepare data and model inputs for the region of interest.

        Looks the target up in the 2FGL catalog, selects events around
        it, builds the ROI source-model XML, then creates one
        BinnedGtlike component per entry in config['joint'] and adds
        each to the summed likelihood.
        """

        target_name = self.config['target']

        # Resolve the target source from the 2FGL catalog.
        cat = Catalog.get('2fgl')
        self.src = CatalogSource(cat.get_source_by_name(target_name))

        # Default the output directory to the target name.
        if self.config['savedir'] is None:
            self.set_config('savedir', target_name)

        if not os.path.exists(self.config['savedir']):
            os.makedirs(self.config['savedir'])

        config = self.config

        # Output file paths, all keyed on the target name.
        self.savestate = os.path.join(config['savedir'],
                                      "%s_savestate.P" % target_name)

        self.ft1file = os.path.join(config['savedir'],
                                    "%s_ft1.fits" % target_name)

        self.binfile = os.path.join(config['savedir'],
                                    "%s_binfile.fits" % target_name)
        self.srcmdl = os.path.join(config['savedir'],
                                   "%s_srcmdl.xml" % target_name)

        self.srcmdl_fit = os.path.join(config['savedir'],
                                       "%s_srcmdl_fit.xml" % target_name)


        # Use config['ltcube'] directly when it is a single FITS file;
        # otherwise treat it as a glob pattern and sum the matches.
        if os.path.isfile(config['ltcube']) and \
                re.search('\.fits?',config['ltcube']):
            self.ltcube = config['ltcube']
        else:
            ltcube = sorted(glob.glob(config['ltcube']))

            self.ltcube = os.path.join(config['savedir'],
                                       "%s_ltcube.fits" % target_name)

            lt_task = LTSumTask(self.ltcube, infile1=ltcube, config=config)

            lt_task.run()

        self.evfile = config['evfile']  #sorted(glob.glob(config['evfile']))
        #        if len(self.evfile) > 1:
        #            evfile_list = os.path.join(self.config('savedir'),'evfile.txt')
        #            np.savetxt(evfile_list,self.evfile,fmt='%s')
        #            self.evfile = os.path.abspath(evfile_list)
        #        else:
        #            self.evfile = self.evfile[0]

        #        if len(self.ltfile) > 1:
        #            ltfile_list = os.path.join(self.config('savedir'),'ltfile.txt')
        #            np.savetxt(ltfile_list,self.ltfile,fmt='%s')
        #            self.ltfile = os.path.abspath(ltfile_list)
        #        else:
        #            self.ltfile = self.ltfile[0]

        #        print self.evfile
        #        print self.ltfile

        self.skydir = SkyDir(self.src.ra, self.src.dec)

        # Event selection centered on the target position.
        sel_task = SelectorTask(self.evfile,
                                self.ft1file,
                                ra=self.src.ra,
                                dec=self.src.dec,
                                config=config['select'],
                                overwrite=False)
        sel_task.run()

        # Write the ROI model XML for sources within 5 degrees.
        cat.create_roi(self.src.ra,
                       self.src.dec,
                       config['isodiff'],
                       config['galdiff'],
                       self.srcmdl,
                       radius=5.0)

        #        self.setup_pointlike()

        self.components = []

        # One binned analysis component per joint-selection entry.
        for i, t in enumerate(self.config['joint']):

            print 'Setting up binned analysis ', i

            #            kw = dict(irfs=None,isodiff=None)
            #            kw.update(t)

            analysis = BinnedGtlike(
                self.src,
                target_name + '_%02i' % (i),
                config,
                evfile=self.ft1file,
                srcmdl=self.srcmdl,
                gtselect=dict(evclass=t['evclass'], evtype=t['evtype']),
                #                                    convtype=t['convtype'],
                irfs=t['irfs'],
                isodiff=t['isodiff'])

            analysis.setup_inputs()
            analysis.setup_gtlike()

            self.components.append(analysis)
            self._like.addComponent(analysis.like)

#        for i, p in self.tied_pars.iteritems():
#            print 'Tying parameters ', i, p
#            self.comp_like.tieParameters(p)

        self._like.energies = self.components[0].like.energies

        # NOTE(review): this early return makes the tieParameters loop
        # below unreachable -- it appears deliberately disabled (see the
        # surrounding commented-out experiments); confirm before removing.
        return

        for i, p in enumerate(self.components[0].like.params()):

            print i, p.srcName, p.getName()

            tied_params = []
            for c in self.components:
                tied_params.append([c.like, p.srcName, p.getName()])
            self.comp_like.tieParameters(tied_params)

#        self.tied_pars = {}
#        for x in self.components:

#            for s in x.like.sourceNames():
#                p = x.like.normPar(s)
#                pidx = x.like.par_index(s,p.getName())

#                if not pidx in self.tied_pars:
#                    self.tied_pars[pidx] = []

#                self.tied_pars[pidx].append([x.like,s,p.getName()])

#                print s, p.getName()
#                self.norm_pars.append([x.like,s,p.getName()])
#            self.norm_pars.append([self.analysis1.like,src,p.getName()])

    def fit(self):
        """Fit the summed likelihood and print a summary of the target source."""

        # NOTE(review): saved_state is captured but never restored or
        # referenced afterwards -- confirm whether a restore-on-failure
        # was intended before removing it.
        saved_state = LikelihoodState(self.like)

        print 'Fitting model'
        self.like.fit(verbosity=2, covar=True)

        source_dict = gtlike_source_dict(self.like, self.src.name)

        import pprint
        pprint.pprint(source_dict)

    def write_xml_model(self):
        """Write the fitted XML model for every analysis component."""
        for component in self.components:
            component.write_model()

    def make_source_model(self):
        """Generate the source model for every analysis component."""
        for component in self.components:
            component.make_srcmodel()

#    def gtlike_results(self, **kwargs):
#        from lande.fermi.likelihood.save import source_dict
#        return source_dict(self.like, self.name, **kwargs)

#    def gtlike_summary(self):
#        from lande.fermi.likelihood.printing import gtlike_summary
#        return gtlike_summary(self.like,maxdist=self.config['radius'])

    def free_source(self, name, free=True):
        """Free or fix a source's parameters in the ROI.

        name : source name (string) or pointlike source object
        free : True frees the normalization parameter; False fixes
               every currently-free parameter of the source.
        """
        free_pars = self.like.freePars(name)
        norm_name = self.like.normPar(name).getName()
        norm_idx = self.like.par_index(name, norm_name)

        if free:
            # Free only the normalization parameter.
            self.like[norm_idx].setFree(True)
        else:
            # Fix all parameters that are currently free.
            self.like.setFreeFlag(name, free_pars, False)

        self.like.syncSrcParams(name)

    def save(self):
        """Pickle this manager to the savestate path set in setup_roi."""
        from util import save_object
        save_object(self, self.savestate)

    def setup_pointlike(self):
        """Build the pointlike ROI and write the source-model XML.

        No-op if the source-model file already exists.

        NOTE(review): reads config keys 'emin', 'emax', 'radius' and
        'zmax' that are not in default_config -- presumably merged in
        from the SelectorTask defaults or user config; verify.
        """

        if os.path.isfile(self.srcmdl): return

        config = self.config

        self._ds = DataSpecification(ft1files=self.ft1file,
                                     ft2files=config['scfile'],
                                     ltcube=self.ltcube,
                                     binfile=self.binfile)

        self._sa = SpectralAnalysis(self._ds,
                                    binsperdec=config['binsperdec'],
                                    emin=config['emin'],
                                    emax=config['emax'],
                                    irf=config['irfs'],
                                    roi_dir=self.skydir,
                                    maxROI=config['radius'],
                                    minROI=config['radius'],
                                    zenithcut=config['zmax'],
                                    event_class=0,
                                    conv_type=config['convtype'])

        sources = []
        point_sources, diffuse_sources = [], []

        galdiff = config['galdiff']
        isodiff = config['isodiff']

        # Diffuse backgrounds; filter(None, ...) drops components whose
        # input file was not configured.
        bkg_sources = self.get_background(galdiff, isodiff)
        sources += filter(None, bkg_sources)

        catalog = self.get_catalog(config['catalog'])
        catalogs = filter(None, [catalog])

        # Partition sources by type for the ROI constructor.
        for source in sources:
            if isinstance(source, PointSource): point_sources.append(source)
            else: diffuse_sources.append(source)

        self._roi = self._sa.roi(roi_dir=self.skydir,
                                 point_sources=point_sources,
                                 diffuse_sources=diffuse_sources,
                                 catalogs=catalogs,
                                 fit_emin=config['emin'],
                                 fit_emax=config['emax'])

        # Create model file
        self._roi.toXML(self.srcmdl,
                        convert_extended=True,
                        expand_env_vars=True)

    @staticmethod
    def get_catalog(catalog=None, **kwargs):
        """Return a pointlike catalog object for the requested catalog.

        catalog : None, an existing SourceCatalog instance (returned
                  unchanged), or one of the names 'PSC3Y', '2FGL', '1FGL'.
        kwargs  : forwarded to the catalog constructor.

        Raises Exception for an unrecognized catalog name.
        """
        # Pass through None or an already-constructed catalog.
        if catalog is None or isinstance(catalog, SourceCatalog):
            return catalog

        if catalog == 'PSC3Y':
            return Catalog3Y(
                '/u/ki/kadrlica/fermi/catalogs/PSC3Y/gll_psc3yearclean_v1_assoc_v6r1p0.fit',
                latextdir='/u/ki/kadrlica/fermi/catalogs/PSC3Y/',
                prune_radius=0,
                **kwargs)

        if catalog == '2FGL':
            return Catalog2FGL(
                '/u/ki/kadrlica/fermi/catalogs/2FGL/gll_psc_v08.fit',
                latextdir='/u/ki/kadrlica/fermi/catalogs/2FGL/Templates/',
                prune_radius=0,
                **kwargs)

        if catalog == "1FGL":
            return FermiCatalog(
                '/u/ki/kadrlica/fermi/catalogs/gll_psc_v02.fit',
                prune_radius=0,
                **kwargs)

        raise Exception("Unknown catalog: %s" % catalog)

    @staticmethod
    def get_background(galdiff=None, isodiff=None, limbdiff=None):
        """Build the list of diffuse background sources.

        galdiff: Galactic diffuse counts cube fits file
        isodiff: Isotropic diffuse spectral text file
        limbdiff: Limb diffuse counts map fits file

        Returns a 3-element list [gal, iso, limb]; an entry is None when
        the corresponding input file was not supplied (callers strip the
        Nones with filter(None, ...)).
        """
        backgrounds = []

        if galdiff is None:
            gal = None
        else:
            gfile = os.path.basename(galdiff)
            gal = get_diffuse_source('MapCubeFunction', galdiff, 'PowerLaw',
                                     None,
                                     os.path.splitext(gfile)[0])
            gal.smodel.set_default_limits()
            # Galactic diffuse spectral index stays fixed during fits.
            gal.smodel.freeze('index')
        backgrounds.append(gal)

        if isodiff is None:
            iso = None
        else:
            ifile = os.path.basename(isodiff)
            iso = get_diffuse_source('ConstantValue', None, 'FileFunction',
                                     isodiff,
                                     os.path.splitext(ifile)[0])
            iso.smodel.set_default_limits()
        backgrounds.append(iso)

        if limbdiff is None:
            limb = None
        else:
            # BUG FIX: bare `basename` was undefined; use os.path.basename
            # as the gal/iso branches do.
            lfile = os.path.basename(limbdiff)
            dmodel = SpatialMap(limbdiff)
            smodel = PLSuperExpCutoff(norm=3.16e-6,
                                      index=0,
                                      cutoff=20.34,
                                      b=1,
                                      e0=200)
            # BUG FIX: `name` was an undefined global (NameError whenever
            # limbdiff was given); derive the source name from the file
            # name, matching the gal/iso naming convention.
            limb = ExtendedSource(name=os.path.splitext(lfile)[0],
                                  model=smodel,
                                  spatial_model=dmodel)
            # Limb model parameters are all held fixed.
            for i in range(limb.smodel.npar):
                limb.smodel.freeze(i)
        # BUG FIX: limb was previously appended twice when limbdiff was
        # given (once inside the else-branch and once here); append it
        # exactly once so the result is always [gal, iso, limb].
        backgrounds.append(limb)

        return backgrounds