def residmap(self, prefix='', **kwargs):
    """Generate 2-D spatial residual maps using the current ROI
    model and the convolution kernel defined with the `model`
    argument.

    Parameters
    ----------
    prefix : str
        String that will be prefixed to the output residual map files.

    {options}

    Returns
    -------
    maps : dict
        A dictionary containing the `~fermipy.utils.Map` objects for
        the residual significance and amplitude.
    """
    timer = Timer.create(start=True)
    self.logger.info('Generating residual maps')

    schema = ConfigSchema(self.defaults['residmap'])
    config = schema.create_config(self.config['residmap'], **kwargs)

    # Defining default properties of test source model
    config['model'].setdefault('Index', 2.0)
    config['model'].setdefault('SpectrumType', 'PowerLaw')
    config['model'].setdefault('SpatialModel', 'PointSource')
    config['model'].setdefault('Prefactor', 1E-13)

    o = self._make_residual_map(prefix, **config)

    if config['make_plots']:
        plotter = plotting.AnalysisPlotter(self.config['plotting'],
                                           fileio=self.config['fileio'],
                                           logging=self.config['logging'])
        plotter.make_residmap_plots(o, self.roi)

    self.logger.info('Finished residual maps')

    outfile = utils.format_filename(self.workdir, 'residmap',
                                    prefix=[o['name']])

    if config['write_fits']:
        o['file'] = os.path.basename(outfile) + '.fits'
        self._make_residmap_fits(o, outfile + '.fits')

    if config['write_npy']:
        np.save(outfile + '.npy', o)

    self.logger.info('Execution time: %.2f s', timer.elapsed_time)
    return o
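# Usage sketch for `residmap` (illustrative, not part of the class):
# assumes `gta` is a configured GTAnalysis instance on which setup() and
# a fit have already been run; the model values below are examples only.
#
#   resid = gta.residmap('fit1',
#                        model={'SpatialModel': 'PointSource', 'Index': 2.0},
#                        make_plots=True)
#   resid['sigma'].data.max()   # peak residual significance in the ROI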
def lightcurve(self, name, **kwargs):
    """Generate a lightcurve for the named source. The function will
    complete the basic analysis steps for each bin and perform a
    likelihood fit for each bin. Extracted values (along with errors)
    are Integral Flux, spectral model, Spectral index, TS value,
    pred. # of photons. Note: successful calculation of
    TS\ :subscript:`var` requires at least one free background
    parameter and a previously optimized ROI model.

    Parameters
    ----------
    name: str
        source name

    {options}

    Returns
    -------
    LightCurve : dict
        Dictionary containing output of the LC analysis
    """
    name = self.roi.get_source_by_name(name).name

    # Create schema for method configuration
    schema = ConfigSchema(self.defaults['lightcurve'],
                          optimizer=self.defaults['optimizer'])
    schema.add_option('prefix', '')
    config = utils.create_dict(self.config['lightcurve'],
                               optimizer=self.config['optimizer'])
    config = schema.create_config(config, **kwargs)

    self.logger.info('Computing Lightcurve for %s' % name)

    o = self._make_lc(name, **config)
    filename = utils.format_filename(self.workdir, 'lightcurve',
                                     prefix=[config['prefix'],
                                             name.lower().replace(' ', '_')])
    o['file'] = None
    if config['write_fits']:
        o['file'] = os.path.basename(filename) + '.fits'
        self._make_lc_fits(o, filename + '.fits', **config)

    if config['write_npy']:
        np.save(filename + '.npy', o)

    self.logger.info('Finished Lightcurve')

    return o
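# Usage sketch for `lightcurve` (illustrative): `gta` is assumed to be a
# configured GTAnalysis instance; the option names and the output keys
# shown here follow the documented lightcurve defaults but are
# assumptions in this sketch, and the values are examples only.
#
#   lc = gta.lightcurve('3FGL J2253.9+1609', binsz=86400. * 7.0,
#                       free_radius=3.0)
#   lc['flux'], lc['flux_err']   # per-bin fluxes and errors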
def tsmap(self, prefix='', **kwargs):
    """Generate a spatial TS map for a source component with
    properties defined by the `model` argument.  The TS map will
    have the same geometry as the ROI.  The output of this method
    is a dictionary containing `~fermipy.skymap.Map` objects with
    the TS and amplitude of the best-fit test source.  By default
    this method will also save maps to FITS files and render them
    as image files.

    This method uses a simplified likelihood fitting
    implementation that only fits for the normalization of the
    test source.  Before running this method it is recommended to
    first optimize the ROI model (e.g. by running
    :py:meth:`~fermipy.gtanalysis.GTAnalysis.optimize`).

    Parameters
    ----------
    prefix : str
        Optional string that will be prepended to all output files.

    {options}

    Returns
    -------
    tsmap : dict
        A dictionary containing the `~fermipy.skymap.Map` objects
        for TS and source amplitude.
    """
    timer = Timer.create(start=True)
    schema = ConfigSchema(self.defaults['tsmap'])
    schema.add_option('loglevel', logging.INFO)
    schema.add_option('map_skydir', None, '',
                      astropy.coordinates.SkyCoord)
    schema.add_option('map_size', 1.0)
    schema.add_option('threshold', 1E-2, '', float)
    schema.add_option('use_pylike', True, '', bool)
    schema.add_option('outfile', None, '', str)
    config = schema.create_config(self.config['tsmap'], **kwargs)

    # Defining default properties of test source model
    config['model'].setdefault('Index', 2.0)
    config['model'].setdefault('SpectrumType', 'PowerLaw')
    config['model'].setdefault('SpatialModel', 'PointSource')

    self.logger.log(config['loglevel'], 'Generating TS map')

    o = self._make_tsmap_fast(prefix, **config)

    if config['make_plots']:
        plotter = plotting.AnalysisPlotter(self.config['plotting'],
                                           fileio=self.config['fileio'],
                                           logging=self.config['logging'])
        plotter.make_tsmap_plots(o, self.roi)

    self.logger.log(config['loglevel'], 'Finished TS map')

    outfile = config.get('outfile', None)
    if outfile is None:
        outfile = utils.format_filename(self.workdir, 'tsmap',
                                        prefix=[o['name']])
    else:
        outfile = os.path.join(self.workdir,
                               os.path.splitext(outfile)[0])

    if config['write_fits']:
        o['file'] = os.path.basename(outfile) + '.fits'
        self._make_tsmap_fits(o, outfile + '.fits')

    if config['write_npy']:
        np.save(outfile + '.npy', o)

    self.logger.log(config['loglevel'], 'Execution time: %.2f s',
                    timer.elapsed_time)
    return o
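# Usage sketch for `tsmap` (illustrative): assumes `gta` has been
# optimized as recommended in the docstring above; the model values are
# examples only.
#
#   ts = gta.tsmap('fit1',
#                  model={'Index': 2.0, 'SpatialModel': 'PointSource'},
#                  make_plots=True)
#   ts['sqrt_ts'].data.max()   # peak detection significance in the map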
def _make_ts_cube(self, prefix, **kwargs):

    map_skydir = kwargs.get('map_skydir')
    map_size = kwargs.get('map_size')
    exclude = kwargs.get('exclude', [])

    # We take the coordinate system and the bin size from the
    # underlying map
    skywcs = self._geom.wcs
    galactic = wcs_utils.is_galactic(skywcs)
    pixsize = max(np.abs(skywcs.wcs.cdelt))

    # If the map_size is not specified we need to find the right
    # number of pixels
    if map_size is None:
        npix = max(self._geom.npix)[0]
        map_size = pixsize * npix
    else:
        npix = int(np.round(map_size / pixsize))

    saved_state = LikelihoodState(self.like)

    for ex_src in exclude:
        self.zero_source(ex_src)

    if map_skydir is None:
        # Take the center of the wcs
        map_geom = self._geom.to_image()
        frame = coordsys_to_frame(map_geom.coordsys)
        map_skydir = SkyCoord(*map_geom.pix_to_coord(self._geom.wcs.wcs.crpix),
                              frame=frame, unit='deg')
        map_skydir = map_skydir.transform_to('icrs')

    refdir = pyLike.SkyDir(map_skydir.ra.deg, map_skydir.dec.deg)

    src_dict = copy.deepcopy(kwargs.setdefault('model', {}))
    src_dict = {} if src_dict is None else src_dict

    src_dict['ra'] = map_skydir.ra.deg
    src_dict['dec'] = map_skydir.dec.deg
    src_dict.setdefault('SpatialModel', 'PointSource')
    src_dict.setdefault('SpatialWidth', 0.3)
    src_dict.setdefault('Index', 2.0)
    src_dict.setdefault('Prefactor', 1.0)
    src_dict['name'] = 'tscube_testsource'

    src = Source.create_from_dict(src_dict)
    if 'Prefactor' in src.spectral_pars:
        src.spectral_pars['Prefactor']['scale'] = 1.0e-10

    modelname = utils.create_model_name(src)
    pylike_src = self.components[0]._create_source(src)
    pylike_src.spectrum().normPar().setBounds(0, 1E6)

    skyproj = pyLike.FitScanner.buildSkyProj(str("AIT"), refdir, pixsize,
                                             npix, galactic)

    optFactory = pyLike.OptimizerFactory_instance()
    optObject = optFactory.create(str("MINUIT"), self.like.composite)

    fitScanner = pyLike.FitScanner(self.like.composite, optObject,
                                   skyproj, npix, npix)
    fitScanner.set_quiet(True)
    fitScanner.setTestSource(pylike_src)

    self.logger.info("Running tscube")
    outfile = utils.format_filename(self.config['fileio']['workdir'],
                                    'tscube.fits', prefix=[prefix])

    fitScanner.run_tscube(True,
                          kwargs['do_sed'], kwargs['nnorm'],
                          kwargs['norm_sigma'],
                          kwargs['cov_scale_bb'], kwargs['cov_scale'],
                          kwargs['tol'], kwargs['max_iter'],
                          kwargs['tol_type'],
                          kwargs['remake_test_source'],
                          kwargs['st_scan_level'],
                          str(''),
                          kwargs['init_lambda'])

    self.logger.info("Writing FITS output")

    fitScanner.writeFitsFile(str(outfile), str("gttscube"),
                             "", False, pyLike.FitScanner.TSMAP_ONLY)
    saved_state.restore()

    convert_tscube(str(outfile), str(outfile))

    tscube = castro.TSCube.create_from_fits(outfile)
    ts_map = tscube.tsmap
    norm_map = tscube.normmap
    npred_map = copy.deepcopy(norm_map)
    npred_map.data *= tscube.refSpec.ref_npred.sum()
    amp_map = copy.deepcopy(norm_map)
    amp_map.data *= pylike_src.spectrum().normPar().getValue()

    sqrt_ts_map = copy.deepcopy(ts_map)
    sqrt_ts_map.data[...] = np.abs(sqrt_ts_map.data)**0.5

    o = {'name': utils.join_strings([prefix, modelname]),
         'src_dict': copy.deepcopy(src_dict),
         'file': os.path.basename(outfile),
         'ts': ts_map,
         'sqrt_ts': sqrt_ts_map,
         'npred': npred_map,
         'amplitude': amp_map,
         'config': kwargs,
         'tscube': tscube}

    if not kwargs['write_fits']:
        os.remove(outfile)
        o['file'] = None

    self.logger.info("Done")
    return o
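# Sketch of how this helper is typically driven (assumption: the public
# `tscube` entry point forwards its options here; the option names and
# values below are illustrative, not verified defaults):
#
#   cube = gta.tscube('fit1', do_sed=True, nnorm=10)
#   cube['ts'].data.shape   # per-pixel TS from the FitScanner run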
def extension(self, name, **kwargs):
    """Test this source for spatial extension with the likelihood
    ratio method (TS_ext).  This method will substitute an
    extended spatial model for the given source and perform a
    one-dimensional scan of the spatial extension parameter over
    the range specified with the width parameters.  The 1-D
    profile likelihood is then used to compute the best-fit value,
    upper limit, and TS for extension.  The nuisance parameters
    that will be simultaneously fit when performing the spatial
    scan can be controlled with the ``fix_shape``,
    ``free_background``, and ``free_radius`` options.  By default
    the position of the source will be fixed to its current
    position.  A simultaneous fit to position and extension can be
    performed by setting ``fit_position`` to True.

    Parameters
    ----------
    name : str
        Source name.

    {options}

    optimizer : dict
        Dictionary that overrides the default optimizer settings.

    Returns
    -------
    extension : dict
        Dictionary containing results of the extension analysis.
        The same dictionary is also saved to the dictionary of
        this source under 'extension'.
    """
    timer = Timer.create(start=True)
    name = self.roi.get_source_by_name(name).name

    schema = ConfigSchema(self.defaults['extension'],
                          optimizer=self.defaults['optimizer'])
    schema.add_option('prefix', '')
    schema.add_option('outfile', None, '', str)
    config = utils.create_dict(self.config['extension'],
                               optimizer=self.config['optimizer'])
    config = schema.create_config(config, **kwargs)

    self.logger.info('Running extension fit for %s', name)

    free_state = FreeParameterState(self)
    ext = self._extension(name, **config)
    free_state.restore()

    self.logger.info('Finished extension fit.')

    if config['make_plots']:
        self._plotter.make_extension_plots(ext, self.roi,
                                           prefix=config['prefix'])

    outfile = config.get('outfile', None)
    if outfile is None:
        outfile = utils.format_filename(self.workdir, 'ext',
                                        prefix=[config['prefix'],
                                                name.lower().replace(' ', '_')])
    else:
        outfile = os.path.join(self.workdir,
                               os.path.splitext(outfile)[0])

    if config['write_fits']:
        self._make_extension_fits(ext, outfile + '.fits')

    if config['write_npy']:
        o_copy = dict(ext)
        self.logger.warning('Saving maps in .npy files is disabled '
                            'because of incompatibilities in Python 3; '
                            'removing the maps from %s.npy' % outfile)
        for xrm in ['tsmap']:
            o_copy.pop(xrm)
        np.save(outfile + '.npy', o_copy)

    self.logger.info('Execution time: %.2f s', timer.elapsed_time)
    return ext
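# Usage sketch for `extension` (illustrative): `gta` is assumed to be a
# configured GTAnalysis instance; the spatial model, option values, and
# output key shown here are examples only.
#
#   ext = gta.extension('sourceA', spatial_model='RadialGaussian',
#                       free_background=True, make_plots=True)
#   ext['ts_ext']   # likelihood-ratio TS for extension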
def _make_tsmap_fast(self, prefix, **kwargs):
    """Make a TS map from a GTAnalysis instance.  This is a
    simplified implementation optimized for speed that only fits
    for the source normalization (all background components are
    kept fixed). The spectral/spatial characteristics of the test
    source can be defined with the src_dict argument.  By default
    this method will generate a TS map for a point source with an
    index=2.0 power-law spectrum.

    Parameters
    ----------
    model : dict or `~fermipy.roi_model.Source`
        Dictionary or Source object defining the properties of the
        test source that will be used in the scan.
    """
    src_dict = copy.deepcopy(kwargs.setdefault('model', {}))
    src_dict = {} if src_dict is None else src_dict

    multithread = kwargs.setdefault('multithread', False)
    threshold = kwargs.setdefault('threshold', 1E-2)
    max_kernel_radius = kwargs.get('max_kernel_radius')
    loge_bounds = kwargs.setdefault('loge_bounds', None)

    if loge_bounds is not None:
        if len(loge_bounds) == 0:
            loge_bounds = [None, None]
        elif len(loge_bounds) == 1:
            loge_bounds += [None]
        loge_bounds[0] = (loge_bounds[0] if loge_bounds[0] is not None
                          else self.log_energies[0])
        loge_bounds[1] = (loge_bounds[1] if loge_bounds[1] is not None
                          else self.log_energies[-1])
    else:
        loge_bounds = [self.log_energies[0], self.log_energies[-1]]

    # Put the test source at the pixel closest to the ROI center
    xpix, ypix = (np.round((self.npix - 1.0) / 2.),
                  np.round((self.npix - 1.0) / 2.))
    cpix = np.array([xpix, ypix])

    skywcs = self._skywcs
    skydir = wcs_utils.pix_to_skydir(cpix[0], cpix[1], skywcs)

    src_dict['ra'] = skydir.ra.deg
    src_dict['dec'] = skydir.dec.deg
    src_dict.setdefault('SpatialModel', 'PointSource')
    src_dict.setdefault('SpatialWidth', 0.3)
    src_dict.setdefault('Index', 2.0)
    src_dict.setdefault('Prefactor', 1E-13)

    counts = []
    bkg = []
    model = []
    c0_map = []
    eslices = []
    enumbins = []
    model_npred = 0
    for c in self.components:
        imin = utils.val_to_edge(c.log_energies, loge_bounds[0])[0]
        imax = utils.val_to_edge(c.log_energies, loge_bounds[1])[0]
        eslice = slice(imin, imax)
        bm = c.model_counts_map(
            exclude=kwargs['exclude']).counts.astype('float')[eslice, ...]
        cm = c.counts_map().counts.astype('float')[eslice, ...]

        bkg += [bm]
        counts += [cm]
        c0_map += [cash(cm, bm)]
        eslices += [eslice]
        enumbins += [cm.shape[0]]

    self.add_source('tsmap_testsource', src_dict, free=True,
                    init_source=False)
    src = self.roi['tsmap_testsource']
    # self.logger.info(str(src_dict))
    modelname = utils.create_model_name(src)
    for c, eslice in zip(self.components, eslices):
        mm = c.model_counts_map(
            'tsmap_testsource').counts.astype('float')[eslice, ...]
        model_npred += np.sum(mm)
        model += [mm]

    self.delete_source('tsmap_testsource')

    for i, mm in enumerate(model):

        dpix = 3
        for j in range(mm.shape[0]):

            ix, iy = np.unravel_index(
                np.argmax(mm[j, ...]), mm[j, ...].shape)

            mx = mm[j, ix, :] > mm[j, ix, iy] * threshold
            my = mm[j, :, iy] > mm[j, ix, iy] * threshold
            dpix = max(dpix, np.round(np.sum(mx) / 2.))
            dpix = max(dpix, np.round(np.sum(my) / 2.))

        if max_kernel_radius is not None and \
                dpix > int(max_kernel_radius / self.components[i].binsz):
            dpix = int(max_kernel_radius / self.components[i].binsz)

        xslice = slice(max(int(xpix - dpix), 0),
                       min(int(xpix + dpix + 1), self.npix))
        model[i] = model[i][:, xslice, xslice]

    ts_values = np.zeros((self.npix, self.npix))
    amp_values = np.zeros((self.npix, self.npix))

    wrap = functools.partial(_ts_value_newton, counts=counts,
                             bkg=bkg, model=model,
                             C_0_map=c0_map)

    if kwargs['map_skydir'] is not None:
        map_offset = wcs_utils.skydir_to_pix(kwargs['map_skydir'],
                                             self._skywcs)
        map_delta = 0.5 * kwargs['map_size'] / self.components[0].binsz
        xmin = max(int(np.ceil(map_offset[1] - map_delta)), 0)
        xmax = min(int(np.floor(map_offset[1] + map_delta)) + 1, self.npix)
        ymin = max(int(np.ceil(map_offset[0] - map_delta)), 0)
        ymax = min(int(np.floor(map_offset[0] + map_delta)) + 1, self.npix)

        xslice = slice(xmin, xmax)
        yslice = slice(ymin, ymax)
        xyrange = [range(xmin, xmax), range(ymin, ymax)]

        map_wcs = skywcs.deepcopy()
        map_wcs.wcs.crpix[0] -= ymin
        map_wcs.wcs.crpix[1] -= xmin
    else:
        xyrange = [range(self.npix), range(self.npix)]
        map_wcs = skywcs

        xslice = slice(0, self.npix)
        yslice = slice(0, self.npix)

    positions = []
    for i, j in itertools.product(xyrange[0], xyrange[1]):
        p = [[k // 2, i, j] for k in enumbins]
        positions += [p]

    if multithread:
        pool = Pool()
        results = pool.map(wrap, positions)
        pool.close()
        pool.join()
    else:
        results = map(wrap, positions)

    for i, r in enumerate(results):
        ix = positions[i][0][1]
        iy = positions[i][0][2]
        ts_values[ix, iy] = r[0]
        amp_values[ix, iy] = r[1]

    ts_values = ts_values[xslice, yslice]
    amp_values = amp_values[xslice, yslice]

    ts_map = Map(ts_values, map_wcs)
    sqrt_ts_map = Map(ts_values**0.5, map_wcs)
    npred_map = Map(amp_values * model_npred, map_wcs)
    amp_map = Map(amp_values * src.get_norm(), map_wcs)

    o = {'name': utils.join_strings([prefix, modelname]),
         'src_dict': copy.deepcopy(src_dict),
         'file': None,
         'ts': ts_map,
         'sqrt_ts': sqrt_ts_map,
         'npred': npred_map,
         'amplitude': amp_map,
         'config': kwargs}

    fits_file = utils.format_filename(self.config['fileio']['workdir'],
                                      'tsmap.fits',
                                      prefix=[prefix, modelname])

    if kwargs['write_fits']:
        fits_utils.write_maps(ts_map,
                              {'SQRT_TS_MAP': sqrt_ts_map,
                               'NPRED_MAP': npred_map,
                               'N_MAP': amp_map},
                              fits_file)
        o['file'] = os.path.basename(fits_file)

    if kwargs['write_npy']:
        np.save(os.path.splitext(fits_file)[0] + '.npy', o)

    return o
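# A minimal sketch of the Cash statistic used above to precompute the
# null-hypothesis likelihood maps (`c0_map`). The actual `cash` helper
# is imported by this module; this stand-alone version is illustrative
# only, and the epsilon guard against log(0) is an assumption.
#
#   import numpy as np
#
#   def cash_sketch(counts, model, eps=1e-300):
#       # C = 2 * (mu - n * ln(mu)): -2 ln L for Poisson data up to a
#       # model-independent constant.  The TS at each pixel is then the
#       # difference between this null value and the value after fitting
#       # the test-source amplitude (done in _ts_value_newton).
#       return 2.0 * (model - counts * np.log(np.clip(model, eps, None)))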
def sed(self, name, **kwargs):
    """Generate a spectral energy distribution (SED) for a source.  This
    function will fit the normalization of the source in each
    energy bin.  By default the SED will be generated with the
    analysis energy bins but a custom binning can be defined with
    the ``loge_bins`` parameter.

    Parameters
    ----------
    name : str
        Source name.

    prefix : str
        Optional string that will be prepended to all output files
        (FITS and rendered images).

    loge_bins : `~numpy.ndarray`
        Sequence of energies in log10(E/MeV) defining the edges of
        the energy bins.  If this argument is None then the
        analysis energy bins will be used.  The energies in this
        sequence must align with the bin edges of the underlying
        analysis instance.

    {options}

    optimizer : dict
        Dictionary that overrides the default optimizer settings.

    Returns
    -------
    sed : dict
        Dictionary containing output of the SED analysis.
    """
    timer = Timer.create(start=True)
    name = self.roi.get_source_by_name(name).name

    # Create schema for method configuration
    schema = ConfigSchema(self.defaults['sed'],
                          optimizer=self.defaults['optimizer'])
    schema.add_option('prefix', '')
    schema.add_option('outfile', None, '', str)
    schema.add_option('loge_bins', None, '', list)
    config = utils.create_dict(self.config['sed'],
                               optimizer=self.config['optimizer'])
    config = schema.create_config(config, **kwargs)

    self.logger.info('Computing SED for %s' % name)

    o = self._make_sed(name, **config)

    self.logger.info('Finished SED')

    outfile = config.get('outfile', None)
    if outfile is None:
        outfile = utils.format_filename(self.workdir, 'sed',
                                        prefix=[config['prefix'],
                                                name.lower().replace(' ', '_')])
    else:
        outfile = os.path.join(self.workdir,
                               os.path.splitext(outfile)[0])

    o['file'] = None
    if config['write_fits']:
        o['file'] = os.path.basename(outfile) + '.fits'
        self._make_sed_fits(o, outfile + '.fits', **config)

    if config['write_npy']:
        np.save(outfile + '.npy', o)

    if config['make_plots']:
        self._plotter.make_sed_plots(o, **config)

    self.logger.info('Execution time: %.2f s', timer.elapsed_time)
    return o
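# Usage sketch for `sed` (illustrative): assumes `gta` is a configured
# GTAnalysis instance; the custom bin edges and the output key are
# examples only.
#
#   sed = gta.sed('sourceA', loge_bins=[2.0, 2.5, 3.0, 3.5, 4.0],
#                 make_plots=True)
#   sed['flux']   # best-fit flux in each energy bin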
def localize(self, name, **kwargs):
    """Find the best-fit position of a source.  Localization is
    performed in two steps.  First a TS map is computed centered
    on the source with half-width set by ``dtheta_max``.  A fit is
    then performed to the maximum TS peak in this map.  The source
    position is then further refined by scanning the likelihood in
    the vicinity of the peak found in the first step.  The size of
    the scan region is set to encompass the 99% positional
    uncertainty contour as determined from the peak fit.

    Parameters
    ----------
    name : str
        Source name.

    {options}

    optimizer : dict
        Dictionary that overrides the default optimizer settings.

    Returns
    -------
    localize : dict
        Dictionary containing results of the localization
        analysis.
    """
    timer = Timer.create(start=True)
    name = self.roi.get_source_by_name(name).name

    schema = ConfigSchema(self.defaults['localize'],
                          optimizer=self.defaults['optimizer'])
    schema.add_option('use_cache', True)
    schema.add_option('prefix', '')
    config = utils.create_dict(self.config['localize'],
                               optimizer=self.config['optimizer'])
    config = schema.create_config(config, **kwargs)

    self.logger.info('Running localization for %s' % name)

    free_state = FreeParameterState(self)
    loc = self._localize(name, **config)
    free_state.restore()

    self.logger.info('Finished localization.')

    if config['make_plots']:
        self._plotter.make_localization_plots(loc, self.roi,
                                              prefix=config['prefix'])

    outfile = utils.format_filename(self.workdir, 'loc',
                                    prefix=[config['prefix'],
                                            name.lower().replace(' ', '_')])

    if config['write_fits']:
        loc['file'] = os.path.basename(outfile) + '.fits'
        self._make_localize_fits(loc, outfile + '.fits', **config)

    if config['write_npy']:
        np.save(outfile + '.npy', dict(loc))

    self.logger.info('Execution time: %.2f s', timer.elapsed_time)
    return loc
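# Usage sketch for `localize` (illustrative): `gta` is assumed to be a
# configured GTAnalysis instance; option values are examples only.
#
#   loc = gta.localize('sourceA', free_radius=1.0, make_plots=True)
#   loc['ra'], loc['dec']   # refined best-fit position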
def _make_residual_map(self, prefix, config, **kwargs):

    write_fits = kwargs.get('write_fits', True)
    write_npy = kwargs.get('write_npy', True)

    src_dict = copy.deepcopy(config.setdefault('model', {}))
    exclude = config.setdefault('exclude', None)
    loge_bounds = config.setdefault('loge_bounds', None)

    if loge_bounds is not None:
        if len(loge_bounds) == 0:
            loge_bounds = [None, None]
        elif len(loge_bounds) == 1:
            loge_bounds += [None]
        loge_bounds[0] = (loge_bounds[0] if loge_bounds[0] is not None
                          else self.energies[0])
        loge_bounds[1] = (loge_bounds[1] if loge_bounds[1] is not None
                          else self.energies[-1])
    else:
        loge_bounds = [self.energies[0], self.energies[-1]]

    # Put the test source at the pixel closest to the ROI center
    xpix, ypix = (np.round((self.npix - 1.0) / 2.),
                  np.round((self.npix - 1.0) / 2.))
    cpix = np.array([xpix, ypix])

    skywcs = self._skywcs
    skydir = wcs_utils.pix_to_skydir(cpix[0], cpix[1], skywcs)

    if src_dict is None:
        src_dict = {}
    src_dict['ra'] = skydir.ra.deg
    src_dict['dec'] = skydir.dec.deg
    src_dict.setdefault('SpatialModel', 'PointSource')
    src_dict.setdefault('SpatialWidth', 0.3)
    src_dict.setdefault('Index', 2.0)

    kernel = None

    if src_dict['SpatialModel'] == 'Gaussian':
        kernel = utils.make_gaussian_kernel(src_dict['SpatialWidth'],
                                            cdelt=self.components[0].binsz,
                                            npix=101)
        kernel /= np.sum(kernel)
        cpix = [50, 50]

    self.add_source('residmap_testsource', src_dict, free=True,
                    init_source=False, save_source_maps=False)
    src = self.roi.get_source_by_name('residmap_testsource')

    modelname = utils.create_model_name(src)
    npix = self.components[0].npix

    mmst = np.zeros((npix, npix))
    cmst = np.zeros((npix, npix))
    emst = np.zeros((npix, npix))

    sm = get_source_kernel(self, 'residmap_testsource', kernel)
    ts = np.zeros((npix, npix))
    sigma = np.zeros((npix, npix))
    excess = np.zeros((npix, npix))

    self.delete_source('residmap_testsource')

    for i, c in enumerate(self.components):

        imin = utils.val_to_edge(c.energies, loge_bounds[0])[0]
        imax = utils.val_to_edge(c.energies, loge_bounds[1])[0]

        mc = c.model_counts_map(exclude=exclude).counts.astype('float')
        cc = c.counts_map().counts.astype('float')
        ec = np.ones(mc.shape)

        ccs = convolve_map(cc, sm[i], cpix, imin=imin, imax=imax)
        mcs = convolve_map(mc, sm[i], cpix, imin=imin, imax=imax)
        ecs = convolve_map(ec, sm[i], cpix, imin=imin, imax=imax)

        cms = np.sum(ccs, axis=0)
        mms = np.sum(mcs, axis=0)
        ems = np.sum(ecs, axis=0)

        cmst += cms
        mmst += mms
        emst += ems

        # cts = 2.0 * (poisson_lnl(cms, cms) - poisson_lnl(cms, mms))
        excess += cms - mms

    ts = 2.0 * (poisson_lnl(cmst, cmst) - poisson_lnl(cmst, mmst))
    sigma = np.sqrt(ts)
    sigma[excess < 0] *= -1

    emst /= np.max(emst)

    sigma_map = Map(sigma, skywcs)
    model_map = Map(mmst / emst, skywcs)
    data_map = Map(cmst / emst, skywcs)
    excess_map = Map(excess / emst, skywcs)

    o = {'name': utils.join_strings([prefix, modelname]),
         'file': None,
         'sigma': sigma_map,
         'model': model_map,
         'data': data_map,
         'excess': excess_map,
         'config': config}

    fits_file = utils.format_filename(self.config['fileio']['workdir'],
                                      'residmap.fits',
                                      prefix=[prefix, modelname])

    if write_fits:
        fits_utils.write_maps(sigma_map,
                              {'DATA_MAP': data_map,
                               'MODEL_MAP': model_map,
                               'EXCESS_MAP': excess_map},
                              fits_file)
        o['file'] = os.path.basename(fits_file)

    if write_npy:
        np.save(os.path.splitext(fits_file)[0] + '.npy', o)

    return o
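# A minimal sketch of the signed-significance convention used above
# (illustration only; the real computation applies the module's
# `poisson_lnl` to the kernel-convolved count and model maps):
#
#   TS    = 2 * [ln L(n | n) - ln L(n | m)]   for Poisson counts n, model m
#   sigma = sign(n - m) * sqrt(TS)
#
# so positive-significance pixels are excesses over the model and
# negative-significance pixels are deficits.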