Example #1
    def PerformLikelihoodAnalysis(self):

        print "\nPerforming likelihood analysis on position: ra=%s, dec=%s" % (
            self.xref, self.yref)

        # Wait a random amount of time (up to 5 minutes) before starting so as not to overload the AFS/NFS disks at SLAC
        waitTime = random.random() * 300
        time.sleep(waitTime)

        # Define the scratch directory
        JobID = os.environ.get('LSB_JOBID')
        Username = getpass.getuser()
        ScratchDirectory = "/scratch/%s/%s/" % (Username, JobID)

        # Define the pfile directory
        if JobID is None:
            PFILESDirectory = "%s/pfiles_%s/" % (self.outdir, self.binNumber)
        else:
            PFILESDirectory = "%s/pfiles/" % ScratchDirectory
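        # (Under LSF, i.e. when LSB_JOBID is set, the pfiles live in the per-job
        #  scratch space; otherwise a bin-specific directory under outdir is used,
        #  presumably so that parallel jobs don't share parameter files.)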

        # Create the output directory if it doesn't already exist
        if not os.path.isdir(self.outdir):
            print "\n >> Creating Directory: " + self.outdir
            cmd = "mkdir " + self.outdir
            os.system(cmd)

        # Define where to save the results
        likelihoodResults = '%s/likelihoodResults_bin%s.txt' % (self.outdir,
                                                                self.binNumber)

        # Remove any pre-existing pfiles
        if os.path.isdir(PFILESDirectory):
            cmd = "rm -r %s" % PFILESDirectory
            os.system(cmd)

        # Set the new pfiles directory
        SetPfilesDirectory(PFILESDirectory)

        # Make a copy of the source model
        xmlModelWithPutativeSource = '%s/ModelSource_bin%s.xml' % (
            self.outdir, self.binNumber)
        cmd = "cp " + self.srcmdl + " " + xmlModelWithPutativeSource
        print cmd
        os.system(cmd)

        # Add a putative point source at the requested location
        # AddCandidateSource(self.xref, self.yref, xmlModelWithPutativeSource)
        ModifySourceModel(xmlModelWithPutativeSource, self.xref, self.yref)
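        # (ModifySourceModel is defined elsewhere; the modified model is expected
        #  to contain the new point source under the name 'CandidateSource',
        #  which is the name used in the fit below. A hypothetical sketch of the
        #  helper is given after this listing.)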

        # # Import the necessary gtapps
        # gtlike = GtApp('gtlike')

        # # Run the likelihood analysis
        # print '\nPerforming the likelihood fit:'
        # gtlike.run(statistic=self.statistic,
        # 				scfile=self.scfile,
        # 				evfile=self.evfile,
        # 				expmap=self.expmap,
        # 				expcube=self.expcube,
        # 				srcmdl=xmlModelWithPutativeSource,
        # 				irfs=self.irfs,
        # 				optimizer=self.optimizer,
        # 				results=likelihoodResults,
        # 				plot='no',
        # 				save='yes')

        # Setup the unbinned likelihood object
        print '\nPerforming the likelihood fit:'
        try:

            obs = UnbinnedObs(self.evfile,
                              self.scfile,
                              expMap=self.expmap,
                              expCube=self.expcube,
                              irfs=self.irfs)

            # Define the likelihood object
            #like = UnbinnedAnalysis(obs,xmlModelWithPutativeSource,optimizer=self.optimizer)
            like = UnbinnedAnalysis(obs,
                                    xmlModelWithPutativeSource,
                                    optimizer='MINUIT')

            # Setup the likelihood parameters
            Source = 'CandidateSource'
            Integral = like.par_index(Source, 'Integral')
            Index = like.par_index(Source, 'Index')
            LowerLimit = like.par_index(Source, 'LowerLimit')
            UpperLimit = like.par_index(Source, 'UpperLimit')

            # Setup the likelihood bounds
            like[Integral].setScale(1e-3)
            like[Index].setBounds(-5, -0.5)
            # like[LowerLimit] = emin
            # like[UpperLimit] = emax
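            # (Integral, Index, LowerLimit and UpperLimit are the parameters of
            #  a PowerLaw2 spectrum; bounding the index to [-5, -0.5] keeps the
            #  fit within a plausible range of spectral slopes.)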

            # Perform the likelihood fit
            #optObject = pyLike.NewMinuit(like.logLike)
            #like.fit(verbosity=0,covar=True,tol=0.02,optObject=optObject)
            like.fit(verbosity=1,
                     covar=True,
                     tol=1e-10,
                     optimizer='MINUIT',
                     optObject=None)
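            # covar=True stores the covariance matrix from the fit, which the
            # parameter errors and fluxError() call below rely on.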

            # Extract the best fit index
            IndexValue = like[Index].value()
            IndexError = like[Index].error()

            # Extract the best fit flux
            FluxValue = like.flux(Source, emin=100, emax=3e5)
            FluxError = like.fluxError(Source, emin=100, emax=3e5)
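            # (Energies are given in MeV, so the flux is integrated over
            #  100 MeV - 300 GeV.)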

            # Extract likelihood fit results
            print '\nLikelihood Results:'
            print like.model[Source]
            print "TS = %s" % like.Ts(Source)
            print "Flux = %s +/- %s" % (FluxValue, FluxError)
            print "Index = %s +/- %s" % (IndexValue, IndexError)

            # Save the xml file
            like.writeXml(xmlFile=xmlModelWithPutativeSource)

        except Exception as message:
            print traceback.format_exc()
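
The listing calls two helpers, SetPfilesDirectory and ModifySourceModel, that are defined elsewhere in the module. The sketch below is not the original implementation; it only illustrates one plausible version inferred from how the helpers are used above: SetPfilesDirectory points the Fermi Science Tools at a per-job parameter-file directory via the PFILES environment variable, and ModifySourceModel appends a point source named 'CandidateSource' with a PowerLaw2 spectrum (matching the Integral/Index/LowerLimit/UpperLimit parameters handled in the fit) to the standard Fermi LAT XML source-model file.

import os
import xml.etree.ElementTree as ET


def SetPfilesDirectory(directory):
    # Hypothetical sketch: create a job-specific pfiles directory and tell
    # the Fermi Science Tools to use it via the PFILES environment variable.
    if not os.path.isdir(directory):
        os.makedirs(directory)
    os.environ['PFILES'] = directory


def ModifySourceModel(xmlfile, ra, dec):
    # Hypothetical sketch: append a putative point source at (ra, dec) to an
    # existing Fermi LAT XML source model and overwrite the file in place.
    tree = ET.parse(xmlfile)
    library = tree.getroot()  # <source_library>

    source = ET.SubElement(library, 'source',
                           name='CandidateSource', type='PointSource')

    # PowerLaw2 spectrum: integral flux between LowerLimit and UpperLimit (MeV)
    spectrum = ET.SubElement(source, 'spectrum', type='PowerLaw2')
    for name, value, scale, lo, hi, free in [
            ('Integral',   '1.0',   '1e-3', '1e-6', '1e6',  '1'),
            ('Index',      '-2.0',  '1.0',  '-5.0', '-0.5', '1'),
            ('LowerLimit', '100.0', '1.0',  '20.0', '5e5',  '0'),
            ('UpperLimit', '3e5',   '1.0',  '20.0', '5e5',  '0')]:
        ET.SubElement(spectrum, 'parameter', name=name, value=value,
                      scale=scale, min=lo, max=hi, free=free)

    # Fixed sky position of the candidate source
    spatial = ET.SubElement(source, 'spatialModel', type='SkyDirFunction')
    ET.SubElement(spatial, 'parameter', name='RA', value=str(ra),
                  scale='1.0', min='-360.0', max='360.0', free='0')
    ET.SubElement(spatial, 'parameter', name='DEC', value=str(dec),
                  scale='1.0', min='-90.0', max='90.0', free='0')

    tree.write(xmlfile)

Writing the model back in place means the subsequent UnbinnedAnalysis call picks the candidate source up directly from xmlModelWithPutativeSource.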