def build_roi(name, snrdata, latdata): snrdata=loaddict(snrdata) latdata=loaddict(latdata) roi_dir = SkyDir(*snrdata[name]['cel']) snrsize = snrdata[name]['size'] if isinstance(snrsize,list) and len(snrsize) == 2: snrradius = math.sqrt(snrsize[0]*snrsize[1])/2.0 else: snrradius = snrsize/2.0 ds = DataSpecification(**latdata['data']) sa = SpectralAnalysis(ds, binsperdec = 4, emin = 1e4, emax = 10**5.5, irf = "P7SOURCE_V6", roi_dir = roi_dir, maxROI = 10, minROI = 10, event_class= 0) diffuse_sources = get_default_diffuse(**latdata['diffuse']) catalog = Catalog2FGL(**latdata['catalog']) roi=sa.roi(point_sources=[], diffuse_sources=diffuse_sources, catalogs=catalog) print 'bins',roi.bin_edges for source in roi.get_sources(): if np.degrees(source.skydir.difference(roi_dir)) < snrradius + 0.5: roi.del_source(source) snr = ExtendedSource( name = name, model = PowerLaw(), spatial_model = Disk(sigma=snrradius, center=roi_dir) ) roi.add_source(snr) return roi
def setup_roi(name, snrdata, catalog_dict=dict(), roi_dict=dict()):
    """ Build a pointlike ROI centered on SNR *name* using the hard-coded
        PWNCAT2 all-sky dataset.

        name:         key into the SNR yaml file
        snrdata:      yaml filename with per-SNR 'cel' position
        catalog_dict: extra kwargs forwarded to Catalog2FGL
        roi_dict:     extra kwargs forwarded to SpectralAnalysis.roi
    """
    snr = yaml.load(open(snrdata))[name]

    skydir = SkyDir(*snr['cel'])

    ds = DataSpecification(
        ft1files = '$FERMILANDE/data/PWNCAT2/nov_30_2011/ft1_PWNCAT2_allsky.fits',
        ft2files = '$FERMILANDE/data/PWNCAT2/nov_30_2011/ft2_PWNCAT2_allsky.fits',
        ltcube   = '$FERMILANDE/data/PWNCAT2/nov_30_2011/ltcube_PWNCAT2_allsky.fits',
        binfile  = '$FERMILANDE/data/PWNCAT2/nov_30_2011/binned_%s.fits' % 4)

    sa = SpectralAnalysis(ds,
                          binsperdec  = 4,
                          roi_dir     = skydir,
                          irf         = 'P7SOURCE_V6',
                          maxROI      = 10,
                          minROI      = 10,
                          event_class = 0)

    # BUGFIX: the original read snr['size'] into an unused local and had a
    # duplicated "diffuse_sources = diffuse_sources =" assignment; both
    # pieces of dead code are removed.
    diffuse_sources = get_default_diffuse(
        diffdir = e('$FERMILANDE/diffuse'),
        gfile   = 'ring_2year_P76_v0.fits',
        ifile   = 'isotrop_2year_P76_source_v0.txt')

    catalogs = Catalog2FGL('$FERMILANDE/catalogs/gll_psc_v05.fit',
                           latextdir='$FERMILANDE/extended_archives/gll_psc_v05_templates/',
                           **catalog_dict)

    roi = sa.roi(
        catalogs        = catalogs,
        diffuse_sources = diffuse_sources,
        **roi_dict)

    return roi
def setup_pointlike(name, pwnlist, phasing=True):
    """ Build a pointlike ROI for the PWN candidate *name*.

        name    Name of the source
        pwnlist Yaml file
        phasing=True  : apply phase cut
        phasing=False : don't do it (currently unsupported; raises)
    """
    sources = yaml.load(open(pwnlist))

    catalog_name    = sources[name]['catalog']
    phase           = sources[name]['phase']
    ltcube          = sources[name]['ltcube']
    pulsar_position = SkyDir(*sources[name]['dir'])

    # BUGFIX: the original else-branch assigned phase_factor=1.0 and then
    # unconditionally raised (dead assignment; ft1 was also undefined on
    # that path).  Restructured as an up-front guard clause.
    if not phasing:
        raise Exception("Unable to phase data")

    # Fraction of the rotation kept by the phase cut; handles a phase
    # window that wraps around 1.
    phase_factor = phase[1] - phase[0] if phase[1] > phase[0] \
        else (1 - phase[0]) + phase[1]
    ft1 = sources[name]['ft1']

    catalog = FermiCatalog(e("$FERMI/catalogs/gll_psc_v02.fit"), free_radius=5)
    catalog_source = [i for i in catalog.get_sources(SkyDir(), 180)
                      if i.name == catalog_name][0]

    center = catalog_source.skydir

    from uw.like.pointspec import DataSpecification
    data_specification = DataSpecification(
        ft1files = ft1,
        ltcube   = ltcube,
        binfile  = "binned_%s.fits" % name)

    spectral_analysis = SpectralAnalysis(data_specification,
                                         binsperdec = 8,
                                         emin       = 100,
                                         emax       = 100000,
                                         irf        = "P6_V3_DIFFUSE",
                                         roi_dir    = center,
                                         maxROI     = 10,
                                         minROI     = 10)

    roi = LandeROI(spectral_analysis.roi(
        roi_dir         = center,
        diffuse_sources = get_default_diffuse(diffdir=e("$FERMI/diffuse"),
                                              gfile="gll_iem_v02.fit",
                                              ifile="isotropic_iem_v02.txt"),
        catalogs        = catalog,
        fit_emin        = 100,
        fit_emax        = 100000,
        catalog_include_radius = 20,
        phase_factor    = phase_factor)
    )

    # replace the catalog counterpart with our own candidate source below
    roi.del_source(catalog_name)

    # make residual TS map

    # add in PWN Candidate
    source = PointSource(
        name   = name,
        model  = PowerLaw(p=[1, 2]),
        skydir = pulsar_position
    )
    source.model.set_flux(1e-7, emin=100, emax=100000)

    roi.add_source(source)

    return roi
def time_cut(roi, tstart, tstop, subdir, use_pointlike_ltcube, verbosity): """ Create a new ROI given a time cut. """ sa = roi.sa ds = sa.dataspec get_defaults=lambda obj: [k[0] for k in obj.defaults if not isinstance(k,str)] get_kwargs=lambda obj: {k:obj.__dict__[k] for k in get_defaults(obj)} ds_kwargs, sa_kwargs, roi_kwargs = map(get_kwargs,[ds,sa,roi]) point_sources = [i.copy() for i in roi.psm.point_sources] diffuse_sources = [i.copy() for i in roi.dsm.diffuse_sources] if sa_kwargs['tstart'] !=0 or sa_kwargs['tstop'] !=0: raise Exception("sanity check") # * cut ft1file on time using gtselect ft2files=roi.sa.pixeldata.ft2files if len(ft2files) > 1: raise Exception("...") ft2file=ft2files[0] evfile=Gtlike.make_evfile(roi,subdir) cut_evfile=join(subdir,"cut_ft1_%s_%s.fits" % (tstart, tstop)) if not exists(cut_evfile): if verbosity: print 'Running gtselect' gtselect=GtApp('gtselect', 'dataSubselector') gtselect.run(infile=evfile, outfile=cut_evfile, ra=0, dec=0, rad=180, tmin=tstart, tmax=tstop, emin=1, emax=1e7, zmax=180) else: if verbosity: print '... Skiping gtselect for %s to %s' % (tstart,tstop) ds_kwargs['ft1files'] = cut_evfile # * create new binfile and ltcube ds_kwargs['binfile'] = join(subdir,'binned_%s_%s.fits' % (tstart, tstop)) # save this to see if it has been phased by # the function uw.utilities.phasetools.phase_ltcube all_time_ltcube = ds_kwargs['ltcube'] new_ltcube = join(subdir,'ltcube_%s_%s.fits' % (tstart, tstop)) if not exists(new_ltcube): if verbosity: print 'Running gtltcube for %s to %s' % (tstart,tstop) if use_pointlike_ltcube: pointlike_ltcube(evfile=cut_evfile, scfile=ft2file, outfile=new_ltcube, dcostheta=0.025, binsz=1, zmax=roi.sa.zenithcut, cone_angle=roi.sa.exp_radius, dir=roi.roi_dir) else: gtltcube=GtApp('gtltcube', 'Likelihood') gtltcube.run(evfile=cut_evfile, scfile=ft2file, outfile=new_ltcube, dcostheta=0.025, binsz=1) else: if verbosity: print '... 
Skiping gtltcube for %s to %s' % (tstart,tstop) # next, check if ltcube is phased, kind of a kluge f = pyfits.open(all_time_ltcube) if f['exposure'].header.has_key('PHASE'): assert f['exposure'].header['PHASE'] == f['weighted_exposure'].header['PHASE'] # If so, phase new ltcube phase = f['exposure'].header['PHASE'] phased_ltcube = join(subdir,'phased_ltcube_%s_%s.fits' % (tstart, tstop)) if not exists(phased_ltcube): phase_ltcube(new_ltcube, phased_ltcube, phase) else: if verbosity: print '... Skiping gtltcube phasing for %s to %s' % (tstart,tstop) ds_kwargs['ltcube'] = phased_ltcube else: ds_kwargs['ltcube'] = new_ltcube # * create new ds, sa, and roi new_ds = DataSpecification(**ds_kwargs) sa = SpectralAnalysis(new_ds, **sa_kwargs) return sa.roi( point_sources = point_sources, diffuse_sources = diffuse_sources, **roi_kwargs)
def build_roi(self, name, fast):
    """ Build a pointlike ROI for the radio pulsar *name*.

        fast=True trades accuracy for speed: smaller ROI, coarser
        binning, and fewer free catalog sources.
    """
    # analysis knobs: (roi_size, binsperdec, max_free, free_radius)
    roi_size, binsperdec, max_free, free_radius = \
        (5, 2, 2, 2) if fast else (10, 4, 5, 5)

    catalog = Catalog2FGL('$FERMI/catalogs/gll_psc_v05.fit',
                          latextdir='$FERMI/extended_archives/gll_psc_v05_templates',
                          prune_radius=0,
                          max_free=max_free,
                          free_radius=free_radius,
                          limit_parameters=True)

    loader = self.radiopsr_loader

    # data files for this pulsar, in the loader's canonical order
    spec = DataSpecification(ft1files=loader.get_ft1(name),
                             ft2files=loader.get_ft2(name),
                             ltcube=loader.get_ltcube(name),
                             binfile=loader.get_binfile(name, binsperdec))

    roi_dir = loader.get_skydir(name)

    analysis = SpectralAnalysis(spec,
                                binsperdec=binsperdec,
                                emin=100,
                                emax=1000000,
                                irf="P7SOURCE_V6",
                                roi_dir=roi_dir,
                                maxROI=roi_size,
                                minROI=roi_size,
                                event_class=0)

    fit_emin = 1e2
    fit_emax = 10 ** 5.5

    # seed spectrum: power law pivoted at the log-center of the fit band
    seed = PowerLaw(index=2, e0=np.sqrt(fit_emin * fit_emax))
    seed.set_limits('index', -5, 5)

    target = PointSource(name=name, model=seed, skydir=roi_dir)

    backgrounds = get_default_diffuse(
        diffdir="/afs/slac/g/glast/groups/diffuse/rings/2year",
        gfile="ring_2year_P76_v0.fits",
        ifile="isotrop_2year_P76_source_v0.txt",
        limit_parameters=True)

    return analysis.roi(point_sources=[target],
                        diffuse_sources=backgrounds,
                        catalogs=catalog,
                        fit_emin=fit_emin,
                        fit_emax=fit_emax)
skydir=start_dir) point_sources += [source1, source2] elif hypothesis != 'Background': # get out the spatial model with the name 'hypothesis' obj = eval("uw.like.SpatialModels.%s" % hypothesis) spatial_model = obj(center=start_dir, coordsystem=SkyDir.GALACTIC) spatial_model.limits[0:2] = N.asarray([[-1,1],[-1,1]]) source=ExtendedSource(name=name, model=start_spectral.copy(), spatial_model=spatial_model) diffuse_sources.append(source) roi = lande_roi.VerboseROI(sa.roi(point_sources = point_sources, diffuse_sources = diffuse_sources, fit_emin=args.emin, fit_emax=args.emax, catalogs=datafiles.catalog)) if args.modify_roi is not None: modify_roi=imp.load_source('modify_roi',args.modify_roi) modify_roi.modify_roi(name,roi) roi.print_summary() if args.convert_plaw: if hypothesis == 'TwoPoints': roi.modify(which='%s (first)' % name,model=PowerLaw(e0=N.sqrt(args.emin*args.emax))) roi.modify(which='%s (second)' % name,model=PowerLaw(e0=N.sqrt(args.emin*args.emax))) else: roi.modify(which=name,model=PowerLaw(e0=N.sqrt(args.emin*args.emax))) if args.prune_radius: # necessary to do here instead of in the FermiCatalog for the case Background
def get_roi(self, name, phase, fit_emin, fit_emax, binsperdec, extended=False, roi_size=10, catalog_kwargs=dict(), **kwargs): """ Sets up the ROI for studying a LAT Pulsar in the off pulse. """ sourcedict=yaml.load(open(self.pwndata))[name] ltcube=sourcedict['ltcube'] pulsar_position=SkyDir(*sourcedict['cel']) ft1=sourcedict['ft1'] ft2=sourcedict['ft2'] source = PWNRegion.get_source(name, position = pulsar_position, fit_emin = fit_emin, fit_emax = fit_emax, sigma = 0.1, extended=extended) sources = [source] roi_dir = pulsar_position phase = PhaseRange(phase) point_sources, diffuse_sources = [],[] for source in sources: if isinstance(source,PointSource): point_sources.append(source) else: diffuse_sources.append(source) diffuse_sources += PWNRegion.get_background() catalog=PWNRegion.get_catalog(**catalog_kwargs) binfile=join(self.savedir,'binned_phased.fits') phased_ltcube=PWNRegion.phase_ltcube(ltcube,phase,self.savedir) phased_ft1=PWNRegion.phase_ft1(ft1,phase,self.savedir) ds = DataSpecification( ft1files = phased_ft1, ft2files = ft2, ltcube = phased_ltcube, binfile = binfile) print 'For now, 4 bins per decade. Eventually, this will have to be better.' sa = SpectralAnalysis(ds, binsperdec = binsperdec, emin = 100, emax = 1000000, irf = "P7SOURCE_V6", roi_dir = roi_dir, maxROI = roi_size, minROI = roi_size, event_class= 0) roi=sa.roi(point_sources=point_sources, diffuse_sources=diffuse_sources, catalogs=catalog, phase_factor=1, fit_emin=fit_emin, fit_emax=fit_emax, **kwargs) print 'bins ',roi.bin_edges roi.extra = dict( unphased_ft1 = ft1, unphased_ltcube = ltcube, phase = phase) self.roi = roi return roi
ft2files = ft2, ltcube = ltcube, binfile = binfile) sa=SpectralAnalysis(ds, binsperdec = 8, irf = irf, roi_dir = skydir_mc, maxROI = 10, minROI = 10, event_class = 0, ) roi=sa.roi( roi_dir=skydir_mc, diffuse_sources=[bg.copy()], fit_emin = emin, fit_emax = emax ) roi.add_source(ps.copy()) roi.print_summary() roi.fit() roi.print_summary() try: roi.localize(which=point, update=True) except Exception, ex: traceback.print_exc(file=sys.stdout) roi.fit()
def setup_region(name,pwndata,phase, free_radius=5, tempdir=None, maxroi=10, xml=None, **kwargs): """Name of the source pwndata Yaml file returns pointlike ROI. """ phase = PhaseRange(phase) sources=yaml.load(open(pwndata)) catalog_name=sources[name]['catalog']['2fgl'] ltcube=sources[name]['ltcube'] pulsar_position=SkyDir(*sources[name]['dir']) ft2=sources[name]['ft2'] ft1=sources[name]['ft1'] catalog=FermiCatalog(e("$FERMI/catalogs/gll_psc_v02.fit")) catalog=Catalog2FGL('$FERMI/catalogs/gll_psc_v05.fit', latextdir='$FERMI/extended_archives/gll_psc_v05_templates', free_radius=free_radius) catalog_source=catalog.get_source(catalog_name) center=catalog_source.skydir if tempdir is None: tempdir=mkdtemp(prefix='/scratch/') binfile=j(tempdir,'binned_phased.fits') if np.allclose(phase.phase_fraction,1): phased_ltcube = ltcube phased_ft1 = ft1 else: # create a temporary ltcube scaled by the phase factor phased_ltcube=j(tempdir,'phased_ltcube.fits') phase_ltcube(ltcube,phased_ltcube, phase=phase) # apply phase cut to ft1 file phased_ft1 = j(tempdir,'ft1_phased.fits') phasetools.phase_cut(ft1,phased_ft1,phaseranges=phase.tolist(dense=False)) from uw.like.pointspec import DataSpecification ds = DataSpecification( ft1files = phased_ft1, ft2files = ft2, ltcube = phased_ltcube, binfile = binfile) sa = SpectralAnalysis(ds, binsperdec = 8, emin = 100, emax = 100000, irf = "P6_V11_DIFFUSE", roi_dir = center, maxROI = maxroi, minROI = maxroi) if xml is None: roi=sa.roi( diffuse_sources=get_default_diffuse(diffdir="/afs/slac/g/glast/groups/diffuse/mapcubes", gfile="gll_iem_v02.fit", ifile="isotropic_iem_v02.txt"), catalogs = catalog, phase_factor =1, **kwargs) else: roi=sa.roi_from_xml( roi_dir=center, xmlfile = xml, phase_factor =1, **kwargs) print 'bins ',roi.bin_edges roi.del_source(catalog_name) return roi
def time_cut(roi, tstart, tstop, subdir, use_pointlike_ltcube, verbosity): """ Create a new ROI given a time cut. """ sa = roi.sa ds = sa.dataspec get_defaults = lambda obj: [ k[0] for k in obj.defaults if not isinstance(k, str) ] get_kwargs = lambda obj: { k: obj.__dict__[k] for k in get_defaults(obj) } ds_kwargs, sa_kwargs, roi_kwargs = map(get_kwargs, [ds, sa, roi]) point_sources = [i.copy() for i in roi.psm.point_sources] diffuse_sources = [i.copy() for i in roi.dsm.diffuse_sources] if sa_kwargs['tstart'] != 0 or sa_kwargs['tstop'] != 0: raise Exception("sanity check") # * cut ft1file on time using gtselect ft2files = roi.sa.pixeldata.ft2files if len(ft2files) > 1: raise Exception("...") ft2file = ft2files[0] evfile = Gtlike.make_evfile(roi, subdir) cut_evfile = join(subdir, "cut_ft1_%s_%s.fits" % (tstart, tstop)) if not exists(cut_evfile): if verbosity: print 'Running gtselect' gtselect = GtApp('gtselect', 'dataSubselector') gtselect.run(infile=evfile, outfile=cut_evfile, ra=0, dec=0, rad=180, tmin=tstart, tmax=tstop, emin=1, emax=1e7, zmax=180) else: if verbosity: print '... Skiping gtselect for %s to %s' % (tstart, tstop) ds_kwargs['ft1files'] = cut_evfile # * create new binfile and ltcube ds_kwargs['binfile'] = join(subdir, 'binned_%s_%s.fits' % (tstart, tstop)) # save this to see if it has been phased by # the function uw.utilities.phasetools.phase_ltcube all_time_ltcube = ds_kwargs['ltcube'] new_ltcube = join(subdir, 'ltcube_%s_%s.fits' % (tstart, tstop)) if not exists(new_ltcube): if verbosity: print 'Running gtltcube for %s to %s' % (tstart, tstop) if use_pointlike_ltcube: pointlike_ltcube(evfile=cut_evfile, scfile=ft2file, outfile=new_ltcube, dcostheta=0.025, binsz=1, zmax=roi.sa.zenithcut, cone_angle=roi.sa.exp_radius, dir=roi.roi_dir) else: gtltcube = GtApp('gtltcube', 'Likelihood') gtltcube.run(evfile=cut_evfile, scfile=ft2file, outfile=new_ltcube, dcostheta=0.025, binsz=1) else: if verbosity: print '... 
Skiping gtltcube for %s to %s' % (tstart, tstop) # next, check if ltcube is phased, kind of a kluge f = pyfits.open(all_time_ltcube) if f['exposure'].header.has_key('PHASE'): assert f['exposure'].header['PHASE'] == f[ 'weighted_exposure'].header['PHASE'] # If so, phase new ltcube phase = f['exposure'].header['PHASE'] phased_ltcube = join(subdir, 'phased_ltcube_%s_%s.fits' % (tstart, tstop)) if not exists(phased_ltcube): phase_ltcube(new_ltcube, phased_ltcube, phase) else: if verbosity: print '... Skiping gtltcube phasing for %s to %s' % ( tstart, tstop) ds_kwargs['ltcube'] = phased_ltcube else: ds_kwargs['ltcube'] = new_ltcube # * create new ds, sa, and roi new_ds = DataSpecification(**ds_kwargs) sa = SpectralAnalysis(new_ds, **sa_kwargs) return sa.roi(point_sources=point_sources, diffuse_sources=diffuse_sources, **roi_kwargs)
print 'conv',conv_type sa = SpectralAnalysis(ds, emin = emin, emax = emax, binsperdec=binsperdec, roi_dir=roi_dir, minROI=roi_size, maxROI=roi_size, conv_type = conv_type, irf='P7SOURCE_V6') # finally, we can build the ROI for LAT analysis roi=sa.roi( catalogs=catalog, diffuse_sources=diffuse_sources, fit_emin = emin, fit_emax = emax, ) roi.plot_counts_map(filename='counts_before.pdf') # print out the ROI roi.print_summary(galactic=True) print roi roi.fit(fit_bg_first=True) roi.print_summary(galactic=True) print roi # Get out the paramters for Cas A and save them to a file
sa = SpectralAnalysis(ds, binsperdec=8, emin = args.emin, emax = args.emax, irf=args.irf, event_class=0, roi_dir=roi_dir, minROI=roi_size, maxROI=roi_size, use_weighted_livetime=True, zenithcut=100, ) roi = sa.roi(roi_dir=roi_dir, point_sources=point_sources, diffuse_sources=diffuse_sources, ) state = PointlikeState(roi) results = r = dict(argparse_kwargs(args)) mc=sourcedict(roi, name, save_TS=False, errors=False) roi.print_summary() roi.fit(use_gradient=False) roi.print_summary() fit=sourcedict(roi, name, save_TS=False) results['pointlike'] = dict(mc=mc, fit=fit)
def setup_pwn(name,pwndata,phase, free_radius=5, tempdir=None, **kwargs):
    """ Build a phase-selected pointlike ROI for PWN candidate *name*.

        name        Name of the source (key into the pwndata yaml file)
        pwndata     Yaml file
        phase       phase window, either [lo, hi] or a list of such windows
        free_radius radius (deg) of free catalog sources

        returns pointlike ROI.
    """
    sources=yaml.load(open(pwndata))

    catalog_name=sources[name]['catalog']
    ltcube=sources[name]['ltcube']
    pulsar_position=SkyDir(*sources[name]['dir'])
    ft2=sources[name]['ft2']
    ft1=sources[name]['ft1']

    # in case no list was passed: normalize a single [lo, hi] pair into a
    # list of windows
    if len(phase)==2 and isinstance(phase[0],numbers.Real) and \
       isinstance(phase[1],numbers.Real):

        # write in case phase wraps around 1
        if phase[0]>phase[1]:
            phase=[[phase[0],1.0],[0.0,phase[1]]]
        else:
            phase = [phase]

    # fraction of the rotation kept by the phase windows
    phase_factor=get_phase_factor(phase)

    catalog=FermiCatalog(e("$FERMI/catalogs/gll_psc_v02.fit"),free_radius=free_radius)
    catalog_source=[i for i in catalog.get_sources(SkyDir(),180) if i.name==catalog_name][0]

    center=catalog_source.skydir

    if tempdir is None: tempdir=mkdtemp(prefix='/scratch/')

    binfile=j(tempdir,'binned_phased.fits')

    # apply phase cut to ft1 file
    phased_ft1 = j(tempdir,'ft1_phased.fits')
    phasetools.phase_cut(ft1,phased_ft1,phaseranges=phase)

    # create a temporary ltcube scaled by the phase factor
    # phased_ltcube=j(tempdir,'phased_ltcube.fits')
    # phase_ltcube(ltcube,phased_ltcube, phase=[0.0,1.0])
    # NOTE(review): ltcube phasing is deliberately disabled (see the
    # commented lines above); the unphased ltcube is used and the phase
    # cut is compensated via phase_factor / per-source rescaling below.
    phased_ltcube=ltcube

    from uw.like.pointspec import DataSpecification
    data_specification = DataSpecification(
        ft1files = phased_ft1,
        ft2files = ft2,
        ltcube   = phased_ltcube,
        binfile  = binfile)

    spectral_analysis = SpectralAnalysis(data_specification,
                                         binsperdec = 4,
                                         emin       = 100,
                                         emax       = 100000,
                                         irf        = "P6_V3_DIFFUSE",
                                         roi_dir    = center,
                                         maxROI     = 10,
                                         minROI     = 10)

    roi=spectral_analysis.roi(
        roi_dir=center,
        diffuse_sources=get_default_diffuse(diffdir=e("$FERMI/diffuse"),
                                            gfile="gll_iem_v02.fit",
                                            ifile="isotropic_iem_v02.txt"),
        catalogs = catalog,
        phase_factor = phase_factor,
        **kwargs)

    # phasing already accounted for through phase_factor above
    print "phase_factor=%.2f"%phase_factor

    # keep overall flux of catalog source,
    # but change the starting index to 2.
    roi.modify(which=catalog_name, name=name, index=2, keep_old_flux=True)

    roi.toXML(filename="essai")
    print roi
    roi.print_summary()

    # Rescale each source's normalization by the phase factor.  The bare
    # excepts are a deliberate best-effort fallback: try the 'Norm'
    # parameter, then 'Int_flux', and just report sources with neither.
    for names in roi.get_names():
        try :
            roi.modify(names,Norm=roi.get_model(names)[0]*roi.phase_factor)
        except :
            try :
                roi.modify(names,Int_flux=roi.get_model(names)[0]*roi.phase_factor)
            except :
                print names

    # NOTE(review): the last two names are assumed to be the diffuse
    # backgrounds; they get a phase-scaled PowerLaw with a frozen index --
    # confirm this positional assumption against roi.get_names() ordering.
    table=roi.get_names()
    print roi.modify(which=table[len(table)-2],model=PowerLaw(p=[1.0*phase_factor,0.1]),free=[True,False])
    print roi.modify(which=table[len(table)-1],model=PowerLaw(p=[1.0*phase_factor,0.1]),free=[True,False])
    # print roi.modify(which='eg_v02',free=[False])

    print roi

    return roi
class AnalysisManager(Configurable): default_config = { 'convtype': -1, 'binsperdec': 4, 'savedir': None, 'scratchdir': None, 'target': None, 'evfile': None, 'scfile': None, 'ltcube': None, 'galdiff': None, 'isodiff': None, 'event_types': None, 'gtbin': None, 'catalog': '2FGL', 'optimizer': 'MINUIT', 'joint': None, 'irfs': None } def __init__(self, config=None, **kwargs): super(AnalysisManager, self).__init__() self.update_default_config(SelectorTask, group='select') self.configure(config, **kwargs) import pprint pprint.pprint(self.config) self._like = SummedLikelihood() @property def like(self): return self._like @property def logLike(self): return self._like.logLike def setup_roi(self, **kwargs): target_name = self.config['target'] cat = Catalog.get('2fgl') self.src = CatalogSource(cat.get_source_by_name(target_name)) if self.config['savedir'] is None: self.set_config('savedir', target_name) if not os.path.exists(self.config['savedir']): os.makedirs(self.config['savedir']) config = self.config self.savestate = os.path.join(config['savedir'], "%s_savestate.P" % target_name) self.ft1file = os.path.join(config['savedir'], "%s_ft1.fits" % target_name) self.binfile = os.path.join(config['savedir'], "%s_binfile.fits" % target_name) self.srcmdl = os.path.join(config['savedir'], "%s_srcmdl.xml" % target_name) self.srcmdl_fit = os.path.join(config['savedir'], "%s_srcmdl_fit.xml" % target_name) if os.path.isfile(config['ltcube']) and \ re.search('\.fits?',config['ltcube']): self.ltcube = config['ltcube'] else: ltcube = sorted(glob.glob(config['ltcube'])) self.ltcube = os.path.join(config['savedir'], "%s_ltcube.fits" % target_name) lt_task = LTSumTask(self.ltcube, infile1=ltcube, config=config) lt_task.run() self.evfile = config['evfile'] #sorted(glob.glob(config['evfile'])) # if len(self.evfile) > 1: # evfile_list = os.path.join(self.config('savedir'),'evfile.txt') # np.savetxt(evfile_list,self.evfile,fmt='%s') # self.evfile = os.path.abspath(evfile_list) # else: # 
self.evfile = self.evfile[0] # if len(self.ltfile) > 1: # ltfile_list = os.path.join(self.config('savedir'),'ltfile.txt') # np.savetxt(ltfile_list,self.ltfile,fmt='%s') # self.ltfile = os.path.abspath(ltfile_list) # else: # self.ltfile = self.ltfile[0] # print self.evfile # print self.ltfile self.skydir = SkyDir(self.src.ra, self.src.dec) sel_task = SelectorTask(self.evfile, self.ft1file, ra=self.src.ra, dec=self.src.dec, config=config['select'], overwrite=False) sel_task.run() cat.create_roi(self.src.ra, self.src.dec, config['isodiff'], config['galdiff'], self.srcmdl, radius=5.0) # self.setup_pointlike() self.components = [] for i, t in enumerate(self.config['joint']): print 'Setting up binned analysis ', i # kw = dict(irfs=None,isodiff=None) # kw.update(t) analysis = BinnedGtlike( self.src, target_name + '_%02i' % (i), config, evfile=self.ft1file, srcmdl=self.srcmdl, gtselect=dict(evclass=t['evclass'], evtype=t['evtype']), # convtype=t['convtype'], irfs=t['irfs'], isodiff=t['isodiff']) analysis.setup_inputs() analysis.setup_gtlike() self.components.append(analysis) self._like.addComponent(analysis.like) # for i, p in self.tied_pars.iteritems(): # print 'Tying parameters ', i, p # self.comp_like.tieParameters(p) self._like.energies = self.components[0].like.energies return for i, p in enumerate(self.components[0].like.params()): print i, p.srcName, p.getName() tied_params = [] for c in self.components: tied_params.append([c.like, p.srcName, p.getName()]) self.comp_like.tieParameters(tied_params) # self.tied_pars = {} # for x in self.components: # for s in x.like.sourceNames(): # p = x.like.normPar(s) # pidx = x.like.par_index(s,p.getName()) # if not pidx in self.tied_pars: # self.tied_pars[pidx] = [] # self.tied_pars[pidx].append([x.like,s,p.getName()]) # print s, p.getName() # self.norm_pars.append([x.like,s,p.getName()]) # self.norm_pars.append([self.analysis1.like,src,p.getName()]) def fit(self): saved_state = LikelihoodState(self.like) print 'Fitting model' 
self.like.fit(verbosity=2, covar=True) source_dict = gtlike_source_dict(self.like, self.src.name) import pprint pprint.pprint(source_dict) def write_xml_model(self): for c in self.components: c.write_model() # c.make_srcmodel() def make_source_model(self): for c in self.components: c.make_srcmodel() # def gtlike_results(self, **kwargs): # from lande.fermi.likelihood.save import source_dict # return source_dict(self.like, self.name, **kwargs) # def gtlike_summary(self): # from lande.fermi.likelihood.printing import gtlike_summary # return gtlike_summary(self.like,maxdist=self.config['radius']) def free_source(self, name, free=True): """ Free a source in the ROI source : string or pointlike source object free : boolean to free or fix parameter """ freePars = self.like.freePars(name) normPar = self.like.normPar(name).getName() idx = self.like.par_index(name, normPar) if not free: self.like.setFreeFlag(name, freePars, False) else: self.like[idx].setFree(True) self.like.syncSrcParams(name) def save(self): from util import save_object save_object(self, self.savestate) def setup_pointlike(self): if os.path.isfile(self.srcmdl): return config = self.config self._ds = DataSpecification(ft1files=self.ft1file, ft2files=config['scfile'], ltcube=self.ltcube, binfile=self.binfile) self._sa = SpectralAnalysis(self._ds, binsperdec=config['binsperdec'], emin=config['emin'], emax=config['emax'], irf=config['irfs'], roi_dir=self.skydir, maxROI=config['radius'], minROI=config['radius'], zenithcut=config['zmax'], event_class=0, conv_type=config['convtype']) sources = [] point_sources, diffuse_sources = [], [] galdiff = config['galdiff'] isodiff = config['isodiff'] bkg_sources = self.get_background(galdiff, isodiff) sources += filter(None, bkg_sources) catalog = self.get_catalog(config['catalog']) catalogs = filter(None, [catalog]) for source in sources: if isinstance(source, PointSource): point_sources.append(source) else: diffuse_sources.append(source) self._roi = 
self._sa.roi(roi_dir=self.skydir, point_sources=point_sources, diffuse_sources=diffuse_sources, catalogs=catalogs, fit_emin=config['emin'], fit_emax=config['emax']) # Create model file self._roi.toXML(self.srcmdl, convert_extended=True, expand_env_vars=True) @staticmethod def get_catalog(catalog=None, **kwargs): if catalog is None or isinstance(catalog, SourceCatalog): pass elif catalog == 'PSC3Y': catalog = Catalog3Y( '/u/ki/kadrlica/fermi/catalogs/PSC3Y/gll_psc3yearclean_v1_assoc_v6r1p0.fit', latextdir='/u/ki/kadrlica/fermi/catalogs/PSC3Y/', prune_radius=0, **kwargs) elif catalog == '2FGL': catalog = Catalog2FGL( '/u/ki/kadrlica/fermi/catalogs/2FGL/gll_psc_v08.fit', latextdir='/u/ki/kadrlica/fermi/catalogs/2FGL/Templates/', prune_radius=0, **kwargs) elif catalog == "1FGL": catalog = FermiCatalog( '/u/ki/kadrlica/fermi/catalogs/gll_psc_v02.fit', prune_radius=0, **kwargs) else: raise Exception("Unknown catalog: %s" % catalog) return catalog @staticmethod def get_background(galdiff=None, isodiff=None, limbdiff=None): """ Diffuse backgrounds galdiff: Galactic diffuse counts cube fits file isodiff: Isotropic diffuse spectral text file limbdiff: Limb diffuse counts map fits file """ backgrounds = [] if galdiff is None: gal = None else: gfile = os.path.basename(galdiff) gal = get_diffuse_source('MapCubeFunction', galdiff, 'PowerLaw', None, os.path.splitext(gfile)[0]) gal.smodel.set_default_limits() gal.smodel.freeze('index') backgrounds.append(gal) if isodiff is None: iso = None else: ifile = os.path.basename(isodiff) iso = get_diffuse_source('ConstantValue', None, 'FileFunction', isodiff, os.path.splitext(ifile)[0]) iso.smodel.set_default_limits() backgrounds.append(iso) if limbdiff is None: limb = None else: lfile = basename(limbdiff) dmodel = SpatialMap(limbdiff) smodel = PLSuperExpCutoff(norm=3.16e-6, index=0, cutoff=20.34, b=1, e0=200) limb = ExtendedSource(name=name, model=smodel, spatial_model=dmodel) for i in range(limb.smodel.npar): limb.smodel.freeze(i) 
backgrounds.append(limb) backgrounds.append(limb) return backgrounds
sa = SpectralAnalysis( ds, irf=fit_irf, roi_dir=skydir_mc, maxROI=10, minROI=10, event_class=0, # not this is necessary for MC data emin=emin, emax=emax, ) source_guess = PointSource(name=name_mc, skydir=skydir_mc, model=model_mc.copy()) roi = sa.roi( roi_dir=skydir_mc, diffuse_sources=[j.copy() for j in diffuse_sources], point_sources=[source_guess] ) print "bins = ", roi.bin_edges print "Fitting unmodified ROI" def fit(): try: roi.fit(use_gradient=True) except Exception, err: print "\n\n\n\nERROR FITTING: %s\n\n\n" % (str(err)) fit() try: roi.localize(which=name_mc, update=True, maxdist=5)
def setup_pwn(name,pwndata,phase, free_radius=5, tempdir=None, emin=1.0e2, emax=1.0e5,maxroi=10,model=None,**kwargs): """Name of the source pwndata Yaml file returns pointlike ROI. """ sources=yaml.load(open(pwndata)) catalog_name=sources[name]['catalog'] ltcube=sources[name]['ltcube'] pulsar_position=SkyDir(*sources[name]['dir']) ft2=sources[name]['ft2'] ft1=sources[name]['ft1'] # in case no list was passed if len(phase)==2 and isinstance(phase[0],numbers.Real) and \ isinstance(phase[1],numbers.Real): # write in case phase wraps around. if phase[0]>phase[1]: phase=[[phase[0],1.0],[0.0,phase[1]]] else: phase = [phase] phase_factor=get_phase_factor(phase) print "phase" print phase print "phase_factor=%.2f"%phase_factor catalog=FermiCatalog(e("$FERMI/catalogs/gll_psc_v02.fit"),free_radius=free_radius) catalog_source=[i for i in catalog.get_sources(SkyDir(),180) if i.name==catalog_name][0] center=catalog_source.skydir if tempdir is None: tempdir=mkdtemp(prefix='/scratch/') binfile=j(tempdir,'binned_phased.fits') # apply phase cut to ft1 file phased_ft1 = j(tempdir,'ft1_phased.fits') phasetools.phase_cut(ft1,phased_ft1,phaseranges=phase) # create a temporary ltcube scaled by the phase factor # phased_ltcube=j(tempdir,'phased_ltcube.fits') # phase_ltcube(ltcube,phased_ltcube, phase=phase) phased_ltcube=ltcube from uw.like.pointspec import DataSpecification data_specification = DataSpecification( ft1files = phased_ft1, ft2files = ft2, ltcube = phased_ltcube, binfile = binfile) spectral_analysis = SpectralAnalysis(data_specification, binsperdec = 4, emin = 100, emax = 100000, irf = "P6_V3_DIFFUSE", roi_dir = center, maxROI = maxroi, minROI = maxroi) if model == None : roi=spectral_analysis.roi( roi_dir=center, diffuse_sources=get_default_diffuse(diffdir=e("$FERMI/diffuse"), gfile="gll_iem_v02.fit", ifile="isotropic_iem_v02.txt"), catalogs = catalog, phase_factor = 1.0, fit_emin = [emin,emin], fit_emax = [emax,emax], **kwargs) else : roi=spectral_analysis.roi( 
roi_dir=center, xmlfile = model, phase_factor =1.0, fit_emin = [emin,emin], fit_emax = [emax,emax], **kwargs) print "---------------------Energy range--------------------" print "emin="+str(roi.bands[0].emin)+"\n" print "emax="+str(roi.bands[len(roi.bands)-1].emax)+"\n" # keep overall flux of catalog source, # but change the starting index to 2. roi.modify(which=catalog_name, name=name, index=2, keep_old_flux=True) return roi
ltcube = dict2fgl['ltcube'] ds = DataSpecification( ft1files = ft1, ft2files = ft2, ltcube = ltcube, binfile = binfile) emin=1e3 emax=1e5 sa = SpectralAnalysis(ds, emin = emin, emax = emax, irf='P7SOURCE_V6', roi_dir=roi_dir, minROI=10, maxROI=10, event_class=0 ) diffdir='/afs/slac/g/glast/groups/diffuse/rings/2year/' diffuse_sources = get_default_diffuse(diffdir=diffdir, gfile='ring_2year_P76_v0.fits', ifile='isotrop_2year_P76_source_v0.txt') roi = sa.roi(roi_dir=roi_dir, diffuse_sources = diffuse_sources) roi.plot_counts_map(filename='counts_map_%s.png' % extra, size=10*np.sqrt(2), pixelsize=0.5)
class AnalysisManager(Configurable): default_config = { 'convtype' : -1, 'binsperdec' : 4, 'savedir' : None, 'scratchdir' : None, 'target' : None, 'evfile' : None, 'scfile' : None, 'ltcube' : None, 'galdiff' : None, 'isodiff' : None, 'event_types': None, 'gtbin' : None, 'catalog' : '2FGL', 'optimizer' : 'MINUIT', 'joint' : None, 'irfs' : None } def __init__(self,config=None,**kwargs): super(AnalysisManager,self).__init__() self.update_default_config(SelectorTask,group='select') self.configure(config,**kwargs) import pprint pprint.pprint(self.config) self._like = SummedLikelihood() @property def like(self): return self._like @property def logLike(self): return self._like.logLike def setup_roi(self,**kwargs): target_name = self.config['target'] cat = Catalog.get('2fgl') self.src = CatalogSource(cat.get_source_by_name(target_name)) if self.config['savedir'] is None: self.set_config('savedir',target_name) if not os.path.exists(self.config['savedir']): os.makedirs(self.config['savedir']) config = self.config self.savestate = os.path.join(config['savedir'], "%s_savestate.P"%target_name) self.ft1file = os.path.join(config['savedir'], "%s_ft1.fits"%target_name) self.binfile = os.path.join(config['savedir'], "%s_binfile.fits"%target_name) self.srcmdl = os.path.join(config['savedir'], "%s_srcmdl.xml"%target_name) self.srcmdl_fit = os.path.join(config['savedir'], "%s_srcmdl_fit.xml"%target_name) if os.path.isfile(config['ltcube']) and \ re.search('\.fits?',config['ltcube']): self.ltcube = config['ltcube'] else: ltcube = sorted(glob.glob(config['ltcube'])) self.ltcube = os.path.join(config['savedir'], "%s_ltcube.fits"%target_name) lt_task = LTSumTask(self.ltcube,infile1=ltcube, config=config) lt_task.run() self.evfile = config['evfile']#sorted(glob.glob(config['evfile'])) # if len(self.evfile) > 1: # evfile_list = os.path.join(self.config('savedir'),'evfile.txt') # np.savetxt(evfile_list,self.evfile,fmt='%s') # self.evfile = os.path.abspath(evfile_list) # else: # self.evfile = 
self.evfile[0] # if len(self.ltfile) > 1: # ltfile_list = os.path.join(self.config('savedir'),'ltfile.txt') # np.savetxt(ltfile_list,self.ltfile,fmt='%s') # self.ltfile = os.path.abspath(ltfile_list) # else: # self.ltfile = self.ltfile[0] # print self.evfile # print self.ltfile self.skydir = SkyDir(self.src.ra,self.src.dec) sel_task = SelectorTask(self.evfile,self.ft1file, ra=self.src.ra,dec=self.src.dec, config=config['select'],overwrite=False) sel_task.run() cat.create_roi(self.src.ra,self.src.dec, config['isodiff'], config['galdiff'], self.srcmdl,radius=5.0) # self.setup_pointlike() self.components = [] for i, t in enumerate(self.config['joint']): print 'Setting up binned analysis ', i # kw = dict(irfs=None,isodiff=None) # kw.update(t) analysis = BinnedGtlike(self.src, target_name + '_%02i'%(i), config, evfile=self.ft1file, srcmdl=self.srcmdl, gtselect=dict(evclass=t['evclass'], evtype=t['evtype']), # convtype=t['convtype'], irfs=t['irfs'], isodiff=t['isodiff']) analysis.setup_inputs() analysis.setup_gtlike() self.components.append(analysis) self._like.addComponent(analysis.like) # for i, p in self.tied_pars.iteritems(): # print 'Tying parameters ', i, p # self.comp_like.tieParameters(p) self._like.energies = self.components[0].like.energies return for i, p in enumerate(self.components[0].like.params()): print i, p.srcName, p.getName() tied_params = [] for c in self.components: tied_params.append([c.like,p.srcName,p.getName()]) self.comp_like.tieParameters(tied_params) # self.tied_pars = {} # for x in self.components: # for s in x.like.sourceNames(): # p = x.like.normPar(s) # pidx = x.like.par_index(s,p.getName()) # if not pidx in self.tied_pars: # self.tied_pars[pidx] = [] # self.tied_pars[pidx].append([x.like,s,p.getName()]) # print s, p.getName() # self.norm_pars.append([x.like,s,p.getName()]) # self.norm_pars.append([self.analysis1.like,src,p.getName()]) def fit(self): saved_state = LikelihoodState(self.like) print 'Fitting model' self.like.fit(verbosity=2, 
covar=True) source_dict = gtlike_source_dict(self.like,self.src.name) import pprint pprint.pprint(source_dict) def write_xml_model(self): for c in self.components: c.write_model() # c.make_srcmodel() def make_source_model(self): for c in self.components: c.make_srcmodel() # def gtlike_results(self, **kwargs): # from lande.fermi.likelihood.save import source_dict # return source_dict(self.like, self.name, **kwargs) # def gtlike_summary(self): # from lande.fermi.likelihood.printing import gtlike_summary # return gtlike_summary(self.like,maxdist=self.config['radius']) def free_source(self,name,free=True): """ Free a source in the ROI source : string or pointlike source object free : boolean to free or fix parameter """ freePars = self.like.freePars(name) normPar = self.like.normPar(name).getName() idx = self.like.par_index(name, normPar) if not free: self.like.setFreeFlag(name, freePars, False) else: self.like[idx].setFree(True) self.like.syncSrcParams(name) def save(self): from util import save_object save_object(self,self.savestate) def setup_pointlike(self): if os.path.isfile(self.srcmdl): return config = self.config self._ds = DataSpecification(ft1files = self.ft1file, ft2files = config['scfile'], ltcube = self.ltcube, binfile = self.binfile) self._sa = SpectralAnalysis(self._ds, binsperdec = config['binsperdec'], emin = config['emin'], emax = config['emax'], irf = config['irfs'], roi_dir = self.skydir, maxROI = config['radius'], minROI = config['radius'], zenithcut = config['zmax'], event_class= 0, conv_type = config['convtype']) sources = [] point_sources, diffuse_sources = [],[] galdiff = config['galdiff'] isodiff = config['isodiff'] bkg_sources = self.get_background(galdiff,isodiff) sources += filter(None, bkg_sources) catalog = self.get_catalog(config['catalog']) catalogs = filter(None, [catalog]) for source in sources: if isinstance(source,PointSource): point_sources.append(source) else: diffuse_sources.append(source) 
self._roi=self._sa.roi(roi_dir=self.skydir, point_sources=point_sources, diffuse_sources=diffuse_sources, catalogs=catalogs, fit_emin=config['emin'], fit_emax=config['emax']) # Create model file self._roi.toXML(self.srcmdl, convert_extended=True, expand_env_vars=True) @staticmethod def get_catalog(catalog=None, **kwargs): if catalog is None or isinstance(catalog,SourceCatalog): pass elif catalog == 'PSC3Y': catalog = Catalog3Y('/u/ki/kadrlica/fermi/catalogs/PSC3Y/gll_psc3yearclean_v1_assoc_v6r1p0.fit', latextdir='/u/ki/kadrlica/fermi/catalogs/PSC3Y/', prune_radius=0, **kwargs) elif catalog == '2FGL': catalog = Catalog2FGL('/u/ki/kadrlica/fermi/catalogs/2FGL/gll_psc_v08.fit', latextdir='/u/ki/kadrlica/fermi/catalogs/2FGL/Templates/', prune_radius=0, **kwargs) elif catalog == "1FGL": catalog = FermiCatalog('/u/ki/kadrlica/fermi/catalogs/gll_psc_v02.fit', prune_radius=0, **kwargs) else: raise Exception("Unknown catalog: %s"%catalog) return catalog @staticmethod def get_background(galdiff=None, isodiff=None, limbdiff=None): """ Diffuse backgrounds galdiff: Galactic diffuse counts cube fits file isodiff: Isotropic diffuse spectral text file limbdiff: Limb diffuse counts map fits file """ backgrounds = [] if galdiff is None: gal=None else: gfile = os.path.basename(galdiff) gal = get_diffuse_source('MapCubeFunction',galdiff, 'PowerLaw',None, os.path.splitext(gfile)[0]) gal.smodel.set_default_limits() gal.smodel.freeze('index') backgrounds.append(gal) if isodiff is None: iso=None else: ifile = os.path.basename(isodiff) iso = get_diffuse_source('ConstantValue',None,'FileFunction' ,isodiff, os.path.splitext(ifile)[0]) iso.smodel.set_default_limits() backgrounds.append(iso) if limbdiff is None: limb=None else: lfile = basename(limbdiff) dmodel = SpatialMap(limbdiff) smodel = PLSuperExpCutoff(norm=3.16e-6,index=0, cutoff=20.34,b=1,e0=200) limb = ExtendedSource(name=name,model=smodel,spatial_model=dmodel) for i in range(limb.smodel.npar): limb.smodel.freeze(i) 
backgrounds.append(limb) backgrounds.append(limb) return backgrounds
def get_roi(self, name, phase, fit_emin, fit_emax, binsperdec, extended=False, roi_size=10, catalog_kwargs=dict(), **kwargs): """ Sets up the ROI for studying a LAT Pulsar in the off pulse. """ sourcedict = yaml.load(open(self.pwndata))[name] ltcube = sourcedict['ltcube'] pulsar_position = SkyDir(*sourcedict['cel']) ft1 = sourcedict['ft1'] ft2 = sourcedict['ft2'] source = PWNRegion.get_source(name, position=pulsar_position, fit_emin=fit_emin, fit_emax=fit_emax, sigma=0.1, extended=extended) sources = [source] roi_dir = pulsar_position phase = PhaseRange(phase) point_sources, diffuse_sources = [], [] for source in sources: if isinstance(source, PointSource): point_sources.append(source) else: diffuse_sources.append(source) diffuse_sources += PWNRegion.get_background() catalog = PWNRegion.get_catalog(**catalog_kwargs) binfile = join(self.savedir, 'binned_phased.fits') phased_ltcube = PWNRegion.phase_ltcube(ltcube, phase, self.savedir) phased_ft1 = PWNRegion.phase_ft1(ft1, phase, self.savedir) ds = DataSpecification(ft1files=phased_ft1, ft2files=ft2, ltcube=phased_ltcube, binfile=binfile) print 'For now, 4 bins per decade. Eventually, this will have to be better.' sa = SpectralAnalysis(ds, binsperdec=binsperdec, emin=100, emax=1000000, irf="P7SOURCE_V6", roi_dir=roi_dir, maxROI=roi_size, minROI=roi_size, event_class=0) roi = sa.roi(point_sources=point_sources, diffuse_sources=diffuse_sources, catalogs=catalog, phase_factor=1, fit_emin=fit_emin, fit_emax=fit_emax, **kwargs) print 'bins ', roi.bin_edges roi.extra = dict(unphased_ft1=ft1, unphased_ltcube=ltcube, phase=phase) self.roi = roi return roi