def GetCovar(srcname, Fit, verbose=True):
    """Extract the covariance sub-matrix for the free parameters of *srcname*.

    Parameters
    ----------
    srcname : str
        Name of the source whose covariance block is wanted.
    Fit : likelihood analysis object
        Fitted likelihood object; ``Fit.covariance`` must already have
        been computed by the optimizer.
    verbose : bool
        If True, print the extracted matrix.

    Returns
    -------
    my_covar : list of list of float
        Covariance matrix restricted to the source's free parameters.

    Raises
    ------
    RuntimeError
        If the covariance has not been computed, or its size does not
        match the number of free parameters.
    """
    import pyLikelihood
    # Map "<source>::<parameter>" -> row/column index in the full
    # covariance matrix, following the optimizer's parameter order.
    par_index_map = {}
    indx = 0
    for src in Fit.sourceNames():
        parNames = pyLikelihood.StringVector()
        Fit[src].src.spectrum().getFreeParamNames(parNames)
        for par in parNames:
            par_index_map["::".join((src, par))] = indx
            indx += 1
    if Fit.covariance is None:
        raise RuntimeError("Covariance matrix has not been computed.")
    covar = np.array(Fit.covariance)
    if len(covar) != len(par_index_map):
        raise RuntimeError("Covariance matrix size does not match the " +
                           "number of free parameters.")
    # Pull out the rows/columns belonging to this source only.
    my_covar = []
    srcpars = pyLikelihood.StringVector()
    Fit[srcname].src.spectrum().getFreeParamNames(srcpars)
    pars = ["::".join((srcname, x)) for x in srcpars]
    for xpar in pars:
        ix = par_index_map[xpar]
        my_covar.append([covar[ix][par_index_map[ypar]] for ypar in pars])
    if verbose:
        # print() calls: consistent with the rest of the file and valid
        # under both Python 2 and Python 3 (single-argument form).
        print("The covariance matrix is :\n%s" % np.array(my_covar))
        print("")
    return my_covar
def __init__(self, like, srcName):
    """Cache the source-specific covariance block for *srcName*.

    Enumerates every free parameter of every source in optimizer
    order, then extracts the rows/columns of ``like.covariance`` that
    belong to this source.
    """
    self.like = like
    self.srcName = srcName
    self.src = like[srcName].src

    # Flat list of "<source>::<parameter>" keys in optimizer order.
    all_keys = []
    for src in like.sourceNames():
        names = pyLike.StringVector()
        like[src].src.spectrum().getFreeParamNames(names)
        all_keys.extend("::".join((src, name)) for name in names)
    par_index_map = dict((key, i) for i, key in enumerate(all_keys))
    #
    # Build the source-specific covariance matrix.
    #
    if like.covariance is None:
        raise RuntimeError("Covariance matrix has not been computed.")
    covar = num.array(like.covariance)
    if len(covar) != len(par_index_map):
        raise RuntimeError("Covariance matrix size does not match the " +
                           "number of free parameters.")
    srcpars = pyLike.StringVector()
    like[srcName].src.spectrum().getFreeParamNames(srcpars)
    keys = ["::".join((srcName, p)) for p in srcpars]
    rows = []
    for xkey in keys:
        row = par_index_map[xkey]
        rows.append([covar[row][par_index_map[ykey]] for ykey in keys])
    self.covar = num.array(rows)
    self.srcpars = srcpars
def __call__(self, xval=100, verbose=0):
    """Check analytic free-parameter derivatives against finite differences.

    Evaluates the wrapped function at *xval*, computes a forward
    finite difference for each free parameter, and compares against
    ``getFreeDerivs``.  Mismatches (or all parameters, when *verbose*
    is set) are reported on stdout.

    Returns
    -------
    tuple
        The free parameter values in effect on entry.
    """
    x = pyLike.dArg(xval)
    y0 = self.func.value(x)
    params = pyLike.DoubleVector()
    self.func.getFreeParamValues(params)
    eps = 1e-7  # relative finite-difference step
    num_derivs = []
    for i in range(len(params)):
        new_params = list(params)
        delta = new_params[i] * eps
        if delta == 0:
            # Absolute fallback when the parameter value is zero.
            delta = eps
        new_params[i] += delta
        self.func.setFreeParamValues(new_params)
        y1 = self.func.value(x)
        num_derivs.append((y1 - y0) / delta)
    derivs = pyLike.DoubleVector()
    # Restore the original parameter values before taking analytic derivs.
    self.func.setFreeParamValues(params)
    self.func.getFreeDerivs(x, derivs)
    for i, d0, d1 in zip(range(len(derivs)), num_derivs, derivs):
        try:
            assert (compare_floats(d0, d1))
            if verbose:
                # Force the diagnostic printout even when the check passes.
                raise AssertionError
        except AssertionError:
            parnames = pyLike.StringVector()
            self.func.getFreeParamNames(parnames)
            print("Parameter : %s %s" % (i, parnames[i]))
            print("%.3e " * len(num_derivs) % tuple(num_derivs))
            print("%.3e " * len(derivs) % tuple(derivs) + "\n")
    return tuple(params)
def plot(self, srcs=None, oplot=False, yrange=None, color=None):
    """Plot model counts for one or all sources, then the residuals.

    *srcs* may be a single source name (str), an explicit collection
    of names, or None (all sources known to ``self.logLike``).
    """
    import hippoplotter as plot
    # Overlays default to red; fresh plots default to black.
    if oplot and color is None:
        color = 'red'
    elif color is None:
        color = 'black'
    if isinstance(srcs, str):
        # Single named source: draw it solid.
        total = self._plot_model(srcs, yrange=yrange, color=color,
                                 oplot=oplot, lineStyle='Solid')
    else:
        if srcs is None:
            srcs = pyLike.StringVector()
            self.logLike.getSrcNames(srcs)
        total = self._plot_model(srcs[0], yrange=yrange, color=color,
                                 oplot=oplot)
        if len(srcs) > 1:
            # Accumulate the remaining sources and overlay the sum solid.
            for src in list(srcs[1:]):
                total += self._plot_model(src, oplot=True, color=color)
            # NOTE(review): indentation reconstructed from a collapsed
            # source; confirm this summed-model draw belongs inside the
            # len(srcs) > 1 branch.
            self._plot_model(total, color=color, oplot=True,
                             lineStyle='Solid')
    self._plot_residuals(total, oplot=oplot, color=color)
def get_function_pars(fn):
    """Extract the parameters of a pyLikelihood function object
    (value, scale, bounds).

    Parameters
    ----------
    fn : pyLikelihood.Function

    Returns
    -------
    pars : list
        One dict per parameter with keys: name, value, error, min,
        max, free, scale.
    """
    par_names = pyLike.StringVector()
    fn.getParamNames(par_names)
    pars = []
    for pname in par_names:
        par = fn.getParam(pname)
        lo, hi = par.getBounds()
        # Errors are only meaningful for free parameters.
        err = par.error() if par.isFree() else np.nan
        pars.append(dict(name=pname,
                         value=par.getValue(),
                         error=err,
                         min=lo,
                         max=hi,
                         free=par.isFree(),
                         scale=par.getScale()))
    return pars
def __init__(self, Fit, pars):
    """Cache the fitted model name, point-source cast, covariance block
    and free-parameter names for the source ``pars.srcname``."""
    self.Fit = Fit
    source = Fit[pars.srcname]
    self.Model = source.funcs['Spectrum'].genericName()
    self.ptsrc = pyLikelihood.PointSource_cast(source.src)
    # Source-specific covariance block (verbose printout disabled).
    self.covar = np.array(utils.GetCovar(pars.srcname, self.Fit, False))
    self.srcpars = pyLikelihood.StringVector()
    source.src.spectrum().getFreeParamNames(self.srcpars)
def _setNuisanceParameters(self):
    """Build an OrderedDict of nuisance Parameter objects, one per free
    spectral parameter of each source in the likelihood model.

    Keys (and parameter names) have the form
    "<plugin name>_<source>_<parameter>"; each parameter's ``free``
    flag follows ``self.fit_nuisance_params``.
    """
    # Collect "<srcName>_<parName>" for every free spectral parameter.
    freeParamNames = []
    for srcName in list(self.like.model.srcNames):
        names = pyLike.StringVector()
        src = self.like.logLike.getSource(srcName)
        src.spectrum().getFreeParamNames(names)
        freeParamNames += ["%s_%s" % (srcName, x) for x in names]

    nuisanceParameters = collections.OrderedDict()
    for name in freeParamNames:
        key = "%s_%s" % (self.name, name)
        bounds = self.getNuisanceParameterBounds(name)
        par = Parameter(
            key,
            self.getNuisanceParameterValue(name),
            min_value=bounds[0],
            max_value=bounds[1],
            delta=self.getNuisanceParameterDelta(name),
        )
        par.free = self.fit_nuisance_params
        nuisanceParameters[key] = par
    return nuisanceParameters
def calcBowtie(self, srcName, minE, maxE, numBins):
    '''Compute the "bowtie" (1-sigma model uncertainty band) for a source.

    This is derived from T. Johnson's likeSED code which was in turn
    derived from D. Sanchez's pyUnfoldPlot code which was probably
    based on some code developed by J. Chiang.

    Returns
    -------
    (centEs, bt, spec) : tuple of lists
        Bin-center energies, the 1-sigma model uncertainties, and the
        model fluxes; the latter two in ph/cm^2/s/GeV.
    '''
    # Make some energy bounds for the fit: same max and min as for the
    # bands before, but with more bins.
    modEs = qU.log_array(numBins, minE, maxE)
    centEs = [0.5 * (e1 + e2) for e1, e2 in zip(modEs[0:-1], modEs[1:])]
    # Get the model, converted to ph/cm^2/s/GeV.
    mysrc = pyLike.PointSource_cast(self.MIN[srcName].src)
    spec = [
        float(1000. * mysrc.spectrum()(pyLike.dArg(x))) for x in centEs
    ]
    if self.MIN.covariance is None:
        print("Whoa, you didn't compute the covariance yet...")
        bt = [0]
    else:
        bt = []
        covArray = np.array(self.MIN.covariance)
        srcCovArray = []
        # Map "<source>::<parameter>" -> index in the full covariance.
        par_index_map = {}
        indx = 0
        for src in self.MIN.sourceNames():
            parNames = pyLike.StringVector()
            self.MIN[src].src.spectrum().getFreeParamNames(parNames)
            for par in parNames:
                par_index_map['::'.join((src, par))] = indx
                indx += 1
        srcPars = pyLike.StringVector()
        self.MIN[srcName].src.spectrum().getFreeParamNames(srcPars)
        pars = ['::'.join((srcName, x)) for x in srcPars]
        # The whole point here is to get the srcCovArray.
        for xpar in pars:
            ix = par_index_map[xpar]
            srcCovArray.append(
                [covArray[ix][par_index_map[ypar]] for ypar in pars])
        cov = np.array(srcCovArray)
        for x in centEs:
            arg = pyLike.dArg(x)
            partials = np.array(
                [mysrc.spectrum().derivByParam(arg, y) for y in srcPars])
            # Linear error propagation: sigma^2 = J C J^T.
            val = np.sqrt(np.dot(partials, np.dot(cov, partials)))
            # These should come out the same as the model, so convert to
            # ph/cm^2/s/GeV as well.
            bt += [float(1000. * val)]
    return centEs, bt, spec
def nested(self):
    """Return the list of source names nested inside this composite
    source, or None when the source is not of type "Composite"."""
    if self.src.getType() != "Composite":
        return None
    composite = pyLike.CompositeSource.cast(self.src)
    names = pyLike.StringVector()
    composite.getSrcNames(names)
    return [names[i] for i in range(names.size())]
def _loadSources(self):
    """Populate ``self.srcs`` with a Source wrapper for every source
    known to the underlying logLike object, then walk the tree."""
    names = pyLike.StringVector()
    self.logLike.getSrcNames(names)
    self.srcNames = tuple(names)
    self.srcs = dict(
        (name, Source(self.logLike.getSource(name))) for name in names)
    self._walk()
    self.printFreeOnly = False
def getPriorParams(self):
    """Return a dict of the log-prior's parameter values, or None when
    no prior has been attached to this parameter."""
    prior = self.parameter.log_prior()
    if prior is None:
        return None
    names = pyLike.StringVector()
    prior.getParamNames(names)
    return dict((name, prior.getParamValue(name)) for name in names)
def init_function_pars():
    """Populate the module-level FUNCTION_PAR_NAMES, FUNCTION_NORM_PARS
    and FUNCTION_DEFAULT_PARS tables by instantiating every function
    known to the pyLikelihood source factory."""
    global FUNCTION_PAR_NAMES
    global FUNCTION_NORM_PARS
    global FUNCTION_DEFAULT_PARS

    FUNCTION_PAR_NAMES = {}
    FUNCTION_NORM_PARS = {}
    funcFactory = pyLike.SourceFactory_funcFactory()

    names = pyLike.StringVector()
    funcFactory.getFunctionNames(names)

    for fname in names:
        pars = FUNCTION_DEFAULT_PARS.setdefault(fname, {})
        par_names = FUNCTION_PAR_NAMES.setdefault(fname, [])

        # "EblAtten::X" variants inherit the defaults of the plain "X".
        if ('EblAtten' in fname
                and fname[len('EblAtten::'):] in FUNCTION_DEFAULT_PARS):
            pars.update(FUNCTION_DEFAULT_PARS[fname[len('EblAtten::'):]])

        fn = funcFactory.create(fname)
        try:
            FUNCTION_NORM_PARS[fname] = fn.normPar().getName()
        except Exception:
            # Some functions expose no normalization parameter.
            FUNCTION_NORM_PARS[fname] = None

        params = pyLike.ParameterVector()
        fn.getParams(params)
        for p in params:
            pname = p.getName()
            par_names += [pname]
            if pname == 'Scale':
                pars.setdefault(pname, DEFAULT_SCALE_DICT)
            elif pname == 'Prefactor':
                pars.setdefault(pname, DEFAULT_NORM_DICT)
            else:
                pars.setdefault(pname, {})
            lo, hi = p.getBounds()
            par_dict = dict(name=pname,
                            value=p.getValue(),
                            min=lo,
                            max=hi,
                            scale=1.0,
                            free=False)
            # Defaults override factory values, but the name always wins.
            par_dict.update(copy.deepcopy(pars[pname]))
            par_dict['name'] = pname
            pars[pname] = par_dict
def __init__(self, func, srcName=None, source_obj=None):
    """Wrap a pyLikelihood function, exposing its parameters by name
    through ``self.params``."""
    self.func = func
    self.srcName = srcName
    names = pyLike.StringVector()
    func.getParamNames(names)
    self.paramNames = list(names)
    self.params = dict(
        (name, Parameter(self.func.getParam(name), srcName, source_obj))
        for name in self.paramNames)
    self._parIds = []
def _compositeIndex(self, target_component, target_src, target_par): indx = -1 # # Loop over non-tied parameters # for tiedName, component in zip(self.srcNames, self.components): srcNames = component.sourceNames() for src in srcNames: if src != tiedName: spec = component.model[src].funcs['Spectrum'] parnames = pyLike.StringVector() spec.getFreeParamNames(parnames) for parname in parnames: indx += 1 if (target_component == tiedName and target_src == src and target_par == parname): return indx # # Loop over tied parameters for common sources (just need to do # this for the first component). # spec = self.components[0].model[self.srcNames[0]].funcs['Spectrum'] parnames = pyLike.StringVector() spec.getFreeParamNames(parnames) for parname in parnames: if parname != spec.normPar().getName(): indx += 1 if target_src in self.srcNames and target_par == parname: return indx # # Loop over normalization parameters # for src, component in zip(self.srcNames, self.components): spec = component.model[src].funcs['Spectrum'] if spec.normPar().isFree(): parname = spec.normPar().getName() indx += 1 if target_src == src and target_par == parname: return indx return indx
def _set_errors(self, errors): my_errors = list(errors) # # Set errors for untied sources # for tiedName, component in zip(self.srcNames, self.components): srcNames = component.sourceNames() for src in srcNames: if src != tiedName: spec = component.model[src].funcs['Spectrum'] parnames = pyLike.StringVector() spec.getFreeParamNames(parnames) for parname in parnames: par_index = component.par_index(src, parname) component.model[par_index].setError(my_errors.pop(0)) # # Set errors for tied parameters for common sources # spec = self.components[0].model[self.srcNames[0]].funcs['Spectrum'] numTiedPars = spec.getNumFreeParams() if spec.normPar().isFree(): numTiedPars -= 1 for src, component in zip(self.srcNames, self.components): tied_errors = my_errors[:numTiedPars] spec = component.model[src].funcs['Spectrum'] parnames = pyLike.StringVector() spec.getFreeParamNames(parnames) for parname in parnames: if parname != spec.normPar().getName(): par_index = component.par_index(src, parname) component.model[par_index].setError(tied_errors.pop(0)) # # Set errors for normalization parameters # norm_errors = my_errors[numTiedPars:] for src, component in zip(self.srcNames, self.components): spec = component.model[src].funcs['Spectrum'] if spec.normPar().isFree(): parname = spec.normPar().getName() par_index = component.par_index(src, parname) component.model[par_index].setError(norm_errors.pop(0))
def restore(self, srcName=None):
    """Push cached parameter states back into the likelihood object.

    With no *srcName*, every cached parameter is restored; otherwise
    only the parameters of that one source.  Source parameters are
    re-synchronized afterwards.
    """
    if srcName is None:
        for saved, current in zip(self.pars, self.like.params()):
            saved.setDataMembers(current)
    else:
        parNames = pyLikelihood.StringVector()
        self.like[srcName].src.spectrum().getParamNames(parNames)
        for parName in parNames:
            idx = self.like.par_index(srcName, parName)
            self.pars[idx].setDataMembers(self.like.params()[idx])
    self.like.syncSrcParams()
def run(self, argv):
    """Run this analysis"""
    args = self.parser.parse_args(argv)
    print("srcmaps = %s" % (args.srcmaps))

    # Binned observation assembled from the command-line inputs.
    obs = BinnedAnalysis.BinnedObs(irfs=args.irfs,
                                   expCube=args.expcube,
                                   srcMaps=args.srcmaps,
                                   binnedExpMap=args.bexpmap)
    like = BinnedAnalysis.BinnedAnalysis(
        obs, optimizer='MINUIT',
        srcModel=GtMergeSourceMaps.NULL_MODEL, wmap=None)
    like.logLike.set_use_single_fixed_map(False)

    print("Reading xml model from %s" % args.srcmdl)
    source_factory = pyLike.SourceFactory(obs.observation)
    source_factory.readXml(args.srcmdl, BinnedAnalysis._funcFactory,
                           False, True, True)
    strv = pyLike.StringVector()
    source_factory.fetchSrcNames(strv)
    source_names = [strv[i] for i in range(strv.size())]

    # Move each source from the factory into the model; sources that
    # cannot be released are recorded as missing.
    missing_sources = []
    srcs_to_merge = []
    for source_name in source_names:
        try:
            source = source_factory.releaseSource(source_name)
            # EAC, add the source directly to the model
            like.logLike.addSource(source)
            srcs_to_merge.append(source_name)
        except KeyError:
            missing_sources.append(source_name)

    # NOTE(review): merges the full source_names list rather than
    # srcs_to_merge — confirm mergeSources tolerates names that failed
    # to be released above.
    comp = like.mergeSources(args.merged, source_names, 'ConstantValue')
    like.logLike.getSourceMap(comp.getName())

    print("Merged %i sources into %s" % (len(srcs_to_merge),
                                         comp.getName()))
    if len(missing_sources) > 0:
        print("Missed sources: ", missing_sources)

    print("Writing output source map file %s" % args.outfile)
    like.logLike.saveSourceMaps(args.outfile, False, False)
    if args.gzip:
        os.system("gzip -9 %s" % args.outfile)

    print("Writing output xml file %s" % args.outxml)
    like.writeXml(args.outxml)
def _setNuisanceParameters(self):
    """Build an OrderedDict of nuisance Parameter objects, one per free
    spectral parameter of each source in the likelihood model.

    Keys (and parameter names) have the form
    "<plugin name>_<source>_<parameter>"; each parameter's ``free``
    flag follows ``self.innerMinimization``.
    """
    # Get the list of the sources
    sources = list(self.like.model.srcNames)

    freeParamNames = []
    for srcName in sources:
        thisNamesV = pyLike.StringVector()
        thisSrc = self.like.logLike.getSource(srcName)
        thisSrc.spectrum().getFreeParamNames(thisNamesV)
        # Comprehension instead of map(lambda ...): clearer, and
        # consistent with the sibling implementation in this file.
        freeParamNames.extend("%s_%s" % (srcName, x) for x in thisNamesV)

    nuisanceParameters = collections.OrderedDict()
    for name in freeParamNames:
        value = self.getNuisanceParameterValue(name)
        bounds = self.getNuisanceParameterBounds(name)
        delta = self.getNuisanceParameterDelta(name)
        key = "%s_%s" % (self.name, name)
        nuisanceParameters[key] = Parameter(key,
                                            value,
                                            min_value=bounds[0],
                                            max_value=bounds[1],
                                            delta=delta)
        nuisanceParameters[key].free = self.innerMinimization

    return nuisanceParameters
def get_dnde_error_mev_gtlike(spectrum, covariance_matrix, energies):
    """Propagate the parameter covariance to dN/dE uncertainties.

    Assumes energies in MeV; returns errors in ph/cm**2/s/MeV.
    """
    from .models import gtlike_unscale_all_parameters
    spectrum = gtlike_unscale_all_parameters(spectrum)

    # Parameter names are fixed for the spectrum; fetch them once.
    par_names = pyLikelihood.StringVector()
    spectrum.getParamNames(par_names)

    dnde_err = np.empty_like(energies)
    for i, energy in enumerate(energies):
        # Method taken from pyLikelihood.FluxDensity:
        # sigma^2 = J C J^T with J the parameter gradient.
        arg = pyLikelihood.dArg(energy)
        gradient = np.array(
            [spectrum.derivByParam(arg, name) for name in par_names])
        dnde_err[i] = np.sqrt(
            np.dot(gradient, np.dot(covariance_matrix, gradient)))
    return dnde_err
def run_analysis(self, argv):
    """Run this analysis.

    Writes one source map per catalog source with index in
    [srcmin, srcmax) into ``args.outfile``, optionally gzipping the
    result.
    """
    args = self._parser.parse_args(argv)

    obs = BinnedAnalysis.BinnedObs(irfs=args.irfs,
                                   expCube=args.expcube,
                                   srcMaps=args.cmap,
                                   binnedExpMap=args.bexpmap)
    like = BinnedAnalysis.BinnedAnalysis(
        obs, optimizer='MINUIT',
        srcModel=GtSrcmapsCatalog.NULL_MODEL, wmap=None)

    source_factory = pyLike.SourceFactory(obs.observation)
    source_factory.readXml(args.srcmdl, BinnedAnalysis._funcFactory,
                           False, True, True)
    srcNames = pyLike.StringVector()
    source_factory.fetchSrcNames(srcNames)

    min_idx = args.srcmin
    max_idx = args.srcmax
    if max_idx < 0:
        # A negative srcmax means "process through the last source".
        max_idx = srcNames.size()

    # range() instead of Python-2-only xrange: consistent with the
    # print() style used elsewhere in this file.
    for i in range(min_idx, max_idx):
        if i == min_idx:
            # Initialize the output file on the first iteration only.
            like.logLike.saveSourceMaps(args.outfile)
            pyLike.CountsMapBase.copyAndUpdateDssKeywords(args.cmap,
                                                          args.outfile,
                                                          None,
                                                          args.irfs)
        srcName = srcNames[i]
        source = source_factory.releaseSource(srcName)
        like.logLike.addSource(source, False)
        like.logLike.saveSourceMap_partial(args.outfile, source)
        like.logLike.deleteSource(srcName)

    if args.gzip:
        os.system("gzip -9 %s" % args.outfile)
def get_priors(like):
    """Extract Gaussian prior parameters from a likelihood object.

    Parameters
    ----------
    like : likelihood object
        Indexable object whose entries expose ``log_prior()``.

    Returns
    -------
    vals : ndarray
        Prior means (1.0 where no prior is set).
    errs : ndarray
        Prior sigmas (1.0 where no prior is set).
    has_prior : ndarray of bool
        True for each parameter that carries a log-prior.

    Raises
    ------
    Exception
        If a prior lacks a Mean or Sigma parameter.
    """
    npar = len(like.params())
    vals = np.ones(npar)
    errs = np.ones(npar)
    has_prior = np.array([False] * npar)

    for i, p in enumerate(like.params()):
        prior = like[i].log_prior()
        if prior is None:
            continue

        par_names = pyLike.StringVector()
        prior.getParamNames(par_names)

        # Only Gaussian-style (Mean/Sigma) priors are supported.
        if 'Mean' not in par_names:
            raise Exception('Failed to find Mean in prior parameters.')
        if 'Sigma' not in par_names:
            raise Exception('Failed to find Sigma in prior parameters.')

        for t in par_names:
            if t == 'Mean':
                vals[i] = prior.parameter(t).getValue()
            if t == 'Sigma':
                errs[i] = prior.parameter(t).getValue()

        has_prior[i] = True

    return vals, errs, has_prior
def build_pointlike_model(spectrum):
    """ Convert a gtlike model object to a pointlike model object.

    >>> spectrum = _funcFactory.create('PowerLaw')
    >>> param=spectrum.getParam('Prefactor')
    >>> param.setScale(10)
    >>> param.setTrueValue(1e-9)
    >>> param.setBounds(1e-11,1e-9)
    >>> param.setError(3e-11)
    >>> param.setFree(True)

    >>> param=spectrum.getParam('Index')
    >>> param.setScale(2)
    >>> param.setBounds(-10,5)
    >>> param.setTrueValue(-3)
    >>> param.setError(0.125)
    >>> param.setFree(False)

    Check spectral values:

    >>> model = build_pointlike_model(spectrum)
    >>> energies = np.logspace(1, 6, 10000)
    >>> from uw.darkmatter.spectral import DMFitFunction
    >>> np.allclose(DMFitFunction.call_pylike_spectrum(spectrum, energies),
    ...     model(energies), rtol=1e-20, atol=1e-20)
    True

    Check prefactor:

    >>> model.get_scale('norm')
    10.0
    >>> np.allclose(model.get_limits('norm'),[1e-10, 1e-08])
    True
    >>> np.allclose(model.getp('norm'),1e-9)
    True
    >>> np.allclose(model.error('norm'),1e-10)
    True
    >>> model.get_free('norm')
    True

    Check index params:

    >>> model.get_scale('index')
    -2.0
    >>> model.get_limits('index')
    [-10.0, 20.0]
    >>> model.getp('index')
    3.0
    >>> np.allclose(model.error('index'),0.25)
    True
    >>> model.get_free('index')
    False

    Example creating a FileFunction object:

    First, create file out of old model:

    >>> from tempfile import NamedTemporaryFile
    >>> temp = NamedTemporaryFile()
    >>> filename = temp.name
    >>> model.save_profile(filename, emin=1, emax=1e6)

    Now, make FileFunction:

    >>> spectrum = pyLikelihood.FileFunction()
    >>> spectrum.readFunction(filename)

    Set param values:

    >>> param=spectrum.getParam('Normalization')
    >>> param.setScale(2)
    >>> param.setTrueValue(4)
    >>> param.setBounds(.1,10)

    >>> model = build_pointlike_model(spectrum)

    Test spectral points:

    >>> np.allclose(DMFitFunction.call_pylike_spectrum(spectrum, energies),
    ...     model(energies), rtol=1e-20, atol=1e-20)
    True

    Test param values:

    >>> model.get_scale('Normalization')
    2.0
    >>> model.getp('Normalization')
    4.0
    >>> model.get_limits('Normalization')
    [0.2, 20.0]
    """
    gtlike_name = spectrum.genericName()

    if gtlike_name == 'FileFunction':
        # FileFunctions are rebuilt from the file they reference.
        ff = pyLikelihood.FileFunction_cast(spectrum)
        filename = ff.filename()
        model = FileFunction(file=filename)
    else:
        model = XML_to_Model.modict[gtlike_name]()

    param_names = pyLikelihood.StringVector()
    spectrum.getParamNames(param_names)
    for gtlike_name in param_names:
        pointlike_name = model.get_pointlike_name(gtlike_name)
        param = spectrum.getParam(gtlike_name)
        if pointlike_name in model.default_extra_params.keys():
            # no mapping for extra params
            model.setp(pointlike_name, param.getTrueValue())
        else:
            model.setp_gtlike(pointlike_name, param.getTrueValue())
        # NOTE(review): indentation below reconstructed from a collapsed
        # source; confirm this block applies to extra params as well or
        # only to the else branch.
        if pointlike_name in model.param_names:
            model.set_mapper(pointlike_name, LinearMapper)
            if param.getBounds()[0] < -3.4e+38 and param.getBounds(
            )[1] > 3.4e+38:
                # No effective bound on parameters
                pass
            else:
                # Bounds and errors are converted to true (scaled) values.
                model.set_limits_gtlike(
                    pointlike_name,
                    lower=param.getBounds()[0] * param.getScale(),
                    upper=param.getBounds()[1] * param.getScale(),
                    scale=param.getScale())
            model.set_error(pointlike_name,
                            abs(param.error() * param.getScale()))
            model.set_free(pointlike_name, param.isFree())
    return model