def roi_circle(roi_index, galactic=True, radius=5.0):
    """Return a (lon, lat, radius) tuple for the given nside=12 ROI position.

    roi_index : int
        HEALPix nside=12 index of the ROI center.
    galactic : bool
        If True return galactic (l, b); otherwise equatorial (ra, dec).
    radius : float
        Circle radius, passed through unchanged.
    """
    from skymaps import Band
    center = Band(12).dir(roi_index)
    if galactic:
        return (center.l(), center.b(), radius)
    return (center.ra(), center.dec(), radius)
def setup(self, **kwargs): self.plotfolder = 'rois' self.title = 'ROI summary' self.source_name = 'observed' # default for base class self.plots_kw = {} filename = 'rois.pickle' refresh = kwargs.pop( 'refresh', not os.path.exists(filename) or os.path.getmtime(filename) < os.path.getmtime('pickle.zip')) if refresh: files, pkls = self.load_pickles('pickle') assert len( files ) == 1728, 'Expected to find 1728 files, found %d' % len(files) rdict = dict() exclude = ('sources', 'name') for i, pkl in enumerate(pkls): skydir = Band(12).dir(i) tdict = dict((key, item) for key, item in pkl.items() if key not in exclude) tdict.update(sources=pkl['sources'].keys()) glon = skydir.l() if glon > 180: glon -= 360. glat = skydir.b() ra = skydir.ra() dec = skydir.dec() tdict.update(glon=glon, glat=glat, ra=ra, dec=dec) rdict[i] = tdict self.df = pd.DataFrame(rdict).transpose() self.df.to_pickle(filename) print 'saved %s' % filename else: print 'loading %s' % filename self.df = pd.read_pickle(filename) # move this into refresh? rois = self.df rx = rois['ra dec glat glon'.split()].copy() try: rx['chisq'] = [r['chisq'] for r in rois['counts']] except: print '***Failed to find counts, skip creating rois.csv' return rx['npar'] = [len(p) for p in rois.parameters] rx.index.name = 'name' ### ?? #rx['ring'] = [10**p[0] for p in rois.parameters] #rx['iso'] = [10**p[1] for p in rois.parameters] rx.to_csv('rois.csv') print 'saved rois.csv' self.energy = self.df.ix[0]['counts']['energies'] self.funcs = [] self.fnames = []
def __call__(self, roi_index, ncomp=None, overwrite=False, **kwargs):
    """Setup gtanalysis with a pointlike ROI.

    roi_index : int
        nside=12 HEALPix index of the ROI center.
    ncomp : int or None
        If set, use only the first ncomp data components.
    overwrite : bool
        Regenerate ccube/expcube/srcmap files even if they exist.

    Returns the configured gtanalysis.GTAnalysis object (also self.gta).
    """
    self.roi_index = roi_index
    roidir = Band(12).dir(roi_index)
    logger = self.logger

    # Build the fermipy configuration from the template. The literal '%s'
    # in the bexpmap/ccube names is a per-component placeholder filled later.
    gt_config = yaml.load(
        config_template.format(
            ltcube=self.ltcube,
            ft1='',
            irf=self.irf,
            galdiff=self.galdiff,
            components=(self.components if ncomp is None
                        else self.components[:ncomp]),  # to 100 GeV
            ra=roidir.ra(),
            dec=roidir.dec(),
            bexpmap=self.bexpmap_dir + '/bexpmap%s.fits',
            ccube=self.ccube_root + '/{:04d}/ccube%s.fits'.format(roi_index),
            #srcmdl='srcmdl.xml', # same for all components
        ))
    self.gtcfg = gt_config

    # Check and/or setup the ccube files
    assert os.path.exists(self.ccube_root), \
        'Expected to find folder {}'.format(self.ccube_root)  # was self.cube_root: AttributeError on failure
    # Zero-padded to match the ccube paths above (was str(roi_index),
    # which created an unused unpadded folder).
    roi_root = os.path.join(self.ccube_root, '{:04d}'.format(roi_index))
    if not os.path.exists(roi_root):
        os.mkdir(roi_root)

    clist = range(len(gt_config['components']))
    ccfiles = [os.path.join(self.ccube_root, '{:04d}'.format(roi_index),
                            gt_config['gtlike']['ccube'] % '_{:02d}'.format(j))
               for j in clist]
    if not np.all(map(os.path.exists, ccfiles)):
        logger.info('Need to generate ccfiles')
        bf = binned_data.BinFile(self.uwcfg.dataset.binfile)
        bf.generate_ccube_files(self.ccube_root, roi_index,
                                channels=clist, overwrite=overwrite)

    # Check for bexpmap
    assert os.path.exists(self.bexpmap_dir), \
        'expected to find folder {}'.format(self.bexpmap_dir)
    self.create_expcubes(gt_config['components'], overwrite=overwrite)

    logger.info('Create the gtanalysis')
    self.gta = gtanalysis.GTAnalysis(gt_config, **kwargs)

    # Check for srcmaps
    logger.info('Checking srcmap files')
    srcmap_ok = [os.path.exists(c.files['srcmap']) for c in self.gta]
    if np.any(np.logical_not(srcmap_ok)) or overwrite:
        logger.info('Will create srcmap files')
        try:
            self.create_srcmaps(overwrite)
        except Exception as msg:  # 'as' form works on Python 2.6+ and 3
            logger.error('Failed : {}. Returning anyway'.format(msg))
    return self.gta