def method(self, **kwargs):
    schema = config.ConfigSchema(self.defaults['secA'],
                                 secB=self.defaults['secB'])
    schema.add_option('extraOptA', 3.0)
    schema.add_option('extraOptB', False)
    cfg = utils.create_dict(self.config['secA'],
                            secB=self.config['secB'])
    return schema.create_config(cfg, **kwargs)

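# The helper above layers three sources of settings: schema defaults, the
# instance configuration, and call-time keyword arguments.  A minimal
# standalone sketch of that precedence, using plain dicts in place of the
# fermipy ConfigSchema/create_dict helpers (names and values illustrative):

defaults = {'extraOptA': 3.0, 'extraOptB': False}   # schema defaults
instance_config = {'extraOptA': 4.0}                # values from the config
call_kwargs = {'extraOptB': True}                   # per-call overrides

merged = dict(defaults)
merged.update(instance_config)   # instance config overrides defaults
merged.update(call_kwargs)       # call-time kwargs override everything
assert merged == {'extraOptA': 4.0, 'extraOptB': True}
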
def lightcurve(self, name, **kwargs):
    """Generate a lightcurve for the named source. The function will
    complete the basic analysis steps for each bin and perform a
    likelihood fit for each bin. Extracted values (along with errors)
    are Integral Flux, spectral model, Spectral index, TS value,
    pred. # of photons. Note: successful calculation of
    TS:subscript:`var` requires at least one free background
    parameter and a previously optimized ROI model.

    Parameters
    ----------
    name: str
        source name

    {options}

    Returns
    -------
    LightCurve : dict
        Dictionary containing output of the LC analysis
    """
    name = self.roi.get_source_by_name(name).name

    # Create schema for method configuration
    schema = ConfigSchema(self.defaults['lightcurve'],
                          optimizer=self.defaults['optimizer'])
    schema.add_option('prefix', '')
    config = utils.create_dict(self.config['lightcurve'],
                               optimizer=self.config['optimizer'])
    config = schema.create_config(config, **kwargs)

    self.logger.info('Computing Lightcurve for %s' % name)

    o = self._make_lc(name, **config)
    filename = utils.format_filename(self.workdir, 'lightcurve',
                                     prefix=[config['prefix'],
                                             name.lower().replace(' ', '_')])

    o['file'] = None
    if config['write_fits']:
        o['file'] = os.path.basename(filename) + '.fits'
        self._make_lc_fits(o, filename + '.fits', **config)

    if config['write_npy']:
        np.save(filename + '.npy', o)

    self.logger.info('Finished Lightcurve')

    return o

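# Minimal usage sketch for the method above.  It assumes `gta` is a fully
# configured and set-up GTAnalysis instance and that the source name below
# (hypothetical) exists in the ROI model; `prefix`, `write_fits`, and
# `write_npy` are the options handled explicitly in the code above.

lc = gta.lightcurve('3FGL J2253.9+1609', prefix='lc',
                    write_fits=True, write_npy=True)
print(lc['file'])  # basename of the FITS output, or None if not written
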
def find_sources(self, prefix='', **kwargs):
    """An iterative source-finding algorithm that uses likelihood
    ratio (TS) maps of the region of interest to find new sources.
    After each iteration a new TS map is generated incorporating
    sources found in the previous iteration.  The method stops when
    the number of iterations exceeds ``max_iter`` or no sources
    exceeding ``sqrt_ts_threshold`` are found.

    Parameters
    ----------
    {options}

    tsmap : dict
        Keyword arguments dictionary for tsmap method.

    tscube : dict
        Keyword arguments dictionary for tscube method.

    Returns
    -------
    peaks : list
        List of peak objects.

    sources : list
        List of source objects.
    """
    timer = Timer.create(start=True)
    self.logger.info('Starting.')

    schema = ConfigSchema(self.defaults['sourcefind'],
                          tsmap=self.defaults['tsmap'],
                          tscube=self.defaults['tscube'])
    schema.add_option('search_skydir', None, '', SkyCoord)
    schema.add_option('search_minmax_radius', [None, 1.0], '', list)

    config = utils.create_dict(self.config['sourcefind'],
                               tsmap=self.config['tsmap'],
                               tscube=self.config['tscube'])
    config = schema.create_config(config, **kwargs)

    # Defining default properties of test source model
    config['model'].setdefault('Index', 2.0)
    config['model'].setdefault('SpectrumType', 'PowerLaw')
    config['model'].setdefault('SpatialModel', 'PointSource')
    config['model'].setdefault('Prefactor', 1E-13)

    o = {'sources': [], 'peaks': []}

    for i in range(config['max_iter']):
        srcs, peaks = self._find_sources_iterate(prefix, i, **config)
        self.logger.info('Found %i sources in iteration %i.' %
                         (len(srcs), i))
        o['sources'] += srcs
        o['peaks'] += peaks
        if len(srcs) == 0:
            break

    self.logger.info('Done.')
    self.logger.info('Execution time: %.2f s', timer.elapsed_time)
    return o

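# Usage sketch (assumes a configured GTAnalysis instance `gta`).  The
# `model` entries mirror the defaults set with setdefault() above; the
# threshold and separation values are illustrative.

found = gta.find_sources(prefix='pass1',
                         model={'Index': 2.0, 'SpatialModel': 'PointSource'},
                         sqrt_ts_threshold=5.0, min_separation=0.5)
print('%i new sources' % len(found['sources']))
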
def localize(self, name, **kwargs):
    """Find the best-fit position of a source.  Localization is
    performed in two steps.  First a TS map is computed centered on
    the source with half-width set by ``dtheta_max``.  A fit is then
    performed to the maximum TS peak in this map.  The source
    position is then further refined by scanning the likelihood in
    the vicinity of the peak found in the first step.  The size of
    the scan region is set to encompass the 99% positional
    uncertainty contour as determined from the peak fit.

    Parameters
    ----------
    name : str
        Source name.

    {options}

    optimizer : dict
        Dictionary that overrides the default optimizer settings.

    Returns
    -------
    localize : dict
        Dictionary containing results of the localization analysis.
    """
    timer = Timer.create(start=True)
    name = self.roi.get_source_by_name(name).name

    schema = ConfigSchema(self.defaults['localize'],
                          optimizer=self.defaults['optimizer'])
    schema.add_option('use_cache', True)
    schema.add_option('prefix', '')
    config = utils.create_dict(self.config['localize'],
                               optimizer=self.config['optimizer'])
    config = schema.create_config(config, **kwargs)

    self.logger.info('Running localization for %s' % name)

    free_state = FreeParameterState(self)
    loc = self._localize(name, **config)
    free_state.restore()

    self.logger.info('Finished localization.')

    if config['make_plots']:
        self._plotter.make_localization_plots(loc, self.roi,
                                              prefix=config['prefix'])

    outfile = utils.format_filename(self.workdir, 'loc',
                                    prefix=[config['prefix'],
                                            name.lower().replace(' ', '_')])

    if config['write_fits']:
        loc['file'] = os.path.basename(outfile) + '.fits'
        self._make_localize_fits(loc, outfile + '.fits', **config)

    if config['write_npy']:
        np.save(outfile + '.npy', dict(loc))

    self.logger.info('Execution time: %.2f s', timer.elapsed_time)
    return loc

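# Usage sketch (assumes a configured GTAnalysis instance `gta`; the source
# name is hypothetical).  The keys read below are those produced by the
# localization scan, as set in the full implementation later in this section.

loc = gta.localize('3FGL J2253.9+1609', make_plots=True)
if loc['fit_success']:
    print(loc['ra'], loc['dec'], loc['r68'])
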
def extension(self, name, **kwargs):
    """Test this source for spatial extension with the likelihood
    ratio method (TS_ext).  This method will substitute an extended
    spatial model for the given source and perform a one-dimensional
    scan of the spatial extension parameter over the range specified
    with the width parameters.  The 1-D profile likelihood is then
    used to compute the best-fit value, upper limit, and TS for
    extension.  The nuisance parameters that will be simultaneously
    fit when performing the spatial scan can be controlled with the
    ``fix_shape``, ``free_background``, and ``free_radius`` options.
    By default the position of the source will be fixed to its
    current position.  A simultaneous fit to position and extension
    can be performed by setting ``fit_position`` to True.

    Parameters
    ----------
    name : str
        Source name.

    {options}

    optimizer : dict
        Dictionary that overrides the default optimizer settings.

    Returns
    -------
    extension : dict
        Dictionary containing results of the extension analysis.  The
        same dictionary is also saved to the dictionary of this
        source under 'extension'.
    """
    timer = Timer.create(start=True)
    name = self.roi.get_source_by_name(name).name

    schema = ConfigSchema(self.defaults['extension'],
                          optimizer=self.defaults['optimizer'])
    schema.add_option('prefix', '')
    schema.add_option('outfile', None, '', str)
    config = utils.create_dict(self.config['extension'],
                               optimizer=self.config['optimizer'])
    config = schema.create_config(config, **kwargs)

    self.logger.info('Running extension fit for %s', name)

    free_state = FreeParameterState(self)
    ext = self._extension(name, **config)
    free_state.restore()

    self.logger.info('Finished extension fit.')

    if config['make_plots']:
        self._plotter.make_extension_plots(ext, self.roi,
                                           prefix=config['prefix'])

    outfile = config.get('outfile', None)
    if outfile is None:
        outfile = utils.format_filename(self.workdir, 'ext',
                                        prefix=[config['prefix'],
                                                name.lower().replace(' ', '_')])
    else:
        outfile = os.path.join(self.workdir,
                               os.path.splitext(outfile)[0])

    if config['write_fits']:
        self._make_extension_fits(ext, outfile + '.fits')

    if config['write_npy']:
        # Drop map objects that cannot be serialized with numpy under
        # Python 3 before writing the output dictionary.
        o_copy = dict(ext)
        self.logger.warning('Saving maps in .npy files is disabled '
                            'because of incompatibilities in Python 3; '
                            'removing the maps from %s.npy' % outfile)
        for xrm in ['tsmap']:
            o_copy.pop(xrm)
        np.save(outfile + '.npy', o_copy)

    self.logger.info('Execution time: %.2f s', timer.elapsed_time)
    return ext

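# Usage sketch (assumes a configured GTAnalysis instance `gta`; the source
# name is hypothetical).  The nuisance-parameter options named in the
# docstring control what is left free during the extension scan.

ext = gta.extension('3FGL J2253.9+1609', free_background=True,
                    fit_position=False)
# Per the docstring, the same results are also stored on the source itself:
# gta.roi['3FGL J2253.9+1609']['extension']
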
def sed(self, name, **kwargs):
    """Generate a spectral energy distribution (SED) for a source.
    This function will fit the normalization of the source in each
    energy bin.  By default the SED will be generated with the
    analysis energy bins but a custom binning can be defined with the
    ``loge_bins`` parameter.

    Parameters
    ----------
    name : str
        Source name.

    prefix : str
        Optional string that will be prepended to all output files
        (FITS and rendered images).

    loge_bins : `~numpy.ndarray`
        Sequence of energies in log10(E/MeV) defining the edges of
        the energy bins.  If this argument is None then the analysis
        energy bins will be used.  The energies in this sequence must
        align with the bin edges of the underlying analysis instance.

    {options}

    optimizer : dict
        Dictionary that overrides the default optimizer settings.

    Returns
    -------
    sed : dict
        Dictionary containing output of the SED analysis.
    """
    timer = Timer.create(start=True)
    name = self.roi.get_source_by_name(name).name

    # Create schema for method configuration
    schema = ConfigSchema(self.defaults['sed'],
                          optimizer=self.defaults['optimizer'])
    schema.add_option('prefix', '')
    schema.add_option('outfile', None, '', str)
    schema.add_option('loge_bins', None, '', list)
    config = utils.create_dict(self.config['sed'],
                               optimizer=self.config['optimizer'])
    config = schema.create_config(config, **kwargs)

    self.logger.info('Computing SED for %s' % name)

    o = self._make_sed(name, **config)

    self.logger.info('Finished SED')

    outfile = config.get('outfile', None)
    if outfile is None:
        outfile = utils.format_filename(self.workdir, 'sed',
                                        prefix=[config['prefix'],
                                                name.lower().replace(' ', '_')])
    else:
        outfile = os.path.join(self.workdir,
                               os.path.splitext(outfile)[0])

    o['file'] = None
    if config['write_fits']:
        o['file'] = os.path.basename(outfile) + '.fits'
        self._make_sed_fits(o, outfile + '.fits', **config)

    if config['write_npy']:
        np.save(outfile + '.npy', o)

    if config['make_plots']:
        self._plotter.make_sed_plots(o, **config)

    self.logger.info('Execution time: %.2f s', timer.elapsed_time)
    return o

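# Usage sketch (assumes a configured GTAnalysis instance `gta`; the source
# name is hypothetical).  Custom bin edges are given in log10(E/MeV) and,
# per the docstring, must align with the analysis bin edges.

import numpy as np

sed = gta.sed('3FGL J2253.9+1609',
              loge_bins=np.linspace(2.0, 5.0, 7).tolist(),  # 6 bins
              make_plots=True)
print(sed['file'])  # basename of the FITS output, or None if not written
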
def sed(self, name, **kwargs):
    """Generate a spectral energy distribution (SED) for a source.
    This function will fit the normalization of the source in each
    energy bin.  By default the SED will be generated with the
    analysis energy bins but a custom binning can be defined with the
    ``loge_bins`` parameter.

    Parameters
    ----------
    name : str
        Source name.

    prefix : str
        Optional string that will be prepended to all output files
        (FITS and rendered images).

    loge_bins : `~numpy.ndarray`
        Sequence of energies in log10(E/MeV) defining the edges of
        the energy bins.  If this argument is None then the analysis
        energy bins will be used.  The energies in this sequence must
        align with the bin edges of the underlying analysis instance.

    bin_index : float
        Spectral index that will be used when fitting the energy
        distribution within an energy bin.

    use_local_index : bool
        Use a power-law approximation to the shape of the global
        spectrum in each bin.  If this is false then a constant index
        set to `bin_index` will be used.

    fix_background : bool
        Fix background components when fitting the flux normalization
        in each energy bin.  If fix_background=False then all
        background parameters that are currently free in the fit will
        be profiled.  By default fix_background=True.

    ul_confidence : float
        Set the confidence level that will be used for the
        calculation of flux upper limits in each energy bin.

    cov_scale : float
        Scaling factor that will be applied when setting the gaussian
        prior on the normalization of free background sources.  If
        this parameter is None then no gaussian prior will be applied.

    make_plots : bool
        Generate plots.

    write_fits : bool
        Write the output to a FITS file.

    write_npy : bool
        Write the output dictionary to a numpy file.

    optimizer : dict
        Dictionary that overrides the default optimizer settings.

    Returns
    -------
    sed : dict
        Dictionary containing output of the SED analysis.
    """
    name = self.roi.get_source_by_name(name).name

    # Create schema for method configuration
    schema = ConfigSchema(self.defaults['sed'],
                          optimizer=self.defaults['optimizer'])
    schema.add_option('prefix', '')
    schema.add_option('make_plots', False)
    schema.add_option('write_fits', True)
    schema.add_option('write_npy', True)
    schema.add_option('loge_bins', None, '', list)
    config = utils.create_dict(self.config['sed'],
                               optimizer=self.config['optimizer'])
    config = schema.create_config(config, **kwargs)

    self.logger.info('Computing SED for %s' % name)

    o = self._make_sed(name, **config)

    filename = utils.format_filename(self.workdir, 'sed',
                                     prefix=[config['prefix'],
                                             name.lower().replace(' ', '_')])

    o['file'] = None
    if config['write_fits']:
        o['file'] = os.path.basename(filename) + '.fits'
        self._make_sed_fits(o, filename + '.fits', **config)

    if config['write_npy']:
        np.save(filename + '.npy', o)

    if config['make_plots']:
        self._plotter.make_sed_plots(o, **config)

    self.logger.info('Finished SED')

    return o

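# Sketch: reload a saved SED dictionary from the .npy file written above.
# np.save stores the dict as a 0-d object array, so allow_pickle and .item()
# are needed on load.  The filename shown is hypothetical; the actual name
# follows the utils.format_filename pattern above and depends on the prefix
# and source name.

import numpy as np

o = np.load('sed_3fgl_j2253.9+1609.npy', allow_pickle=True).item()
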
def localize(self, name, **kwargs):
    """Find the best-fit position of a source.  Localization is
    performed in two steps.  First a TS map is computed centered on
    the source with half-width set by ``dtheta_max``.  A fit is then
    performed to the maximum TS peak in this map.  The source
    position is then further refined by scanning the likelihood in
    the vicinity of the peak found in the first step.  The size of
    the scan region is set to encompass the 99% positional
    uncertainty contour as determined from the peak fit.

    Parameters
    ----------
    name : str
        Source name.

    dtheta_max : float
        Maximum offset in RA/DEC in deg from the nominal source
        position that will be used to define the boundaries of the TS
        map search region.

    nstep : int
        Number of steps in longitude/latitude that will be taken when
        refining the source position.  The bounds of the scan range
        are set to the 99% positional uncertainty as determined from
        the TS map peak fit.  The total number of sampling points
        will be nstep**2.

    fix_background : bool
        Fix background parameters when fitting the source position.

    update : bool
        Update the model for this source with the best-fit position.
        If newname=None this will overwrite the existing source map
        of this source with one corresponding to its new location.

    newname : str
        Name that will be assigned to the relocalized source when
        update=True.  If newname is None then the existing source
        name will be used.

    make_plots : bool
        Generate plots.

    write_fits : bool
        Write the output to a FITS file.

    write_npy : bool
        Write the output dictionary to a numpy file.

    optimizer : dict
        Dictionary that overrides the default optimizer settings.

    Returns
    -------
    localize : dict
        Dictionary containing results of the localization analysis.
        This dictionary is also saved to the dictionary of this
        source in 'localize'.
    """
    name = self.roi.get_source_by_name(name).name

    schema = ConfigSchema(self.defaults['localize'],
                          optimizer=self.defaults['optimizer'])
    schema.add_option('make_plots', False)
    schema.add_option('write_fits', True)
    schema.add_option('write_npy', True)
    schema.add_option('newname', name)
    schema.add_option('prefix', '')
    config = utils.create_dict(self.config['localize'],
                               optimizer=self.config['optimizer'])
    config = schema.create_config(config, **kwargs)

    nstep = config['nstep']
    dtheta_max = config['dtheta_max']
    update = config['update']
    newname = config['newname']
    prefix = config['prefix']

    self.logger.info('Running localization for %s' % name)

    saved_state = LikelihoodState(self.like)

    if config['fix_background']:
        self.free_sources(free=False, loglevel=logging.DEBUG)

    src = self.roi.copy_source(name)
    skydir = src.skydir
    skywcs = self._skywcs
    src_pix = skydir.to_pixel(skywcs)

    tsmap_fit, tsmap = self._localize_tsmap(name, prefix=prefix,
                                            dtheta_max=dtheta_max)

    self.logger.debug('Completed localization with TS Map.\n'
                      '(ra,dec) = (%10.4f,%10.4f)\n'
                      '(glon,glat) = (%10.4f,%10.4f)',
                      tsmap_fit['ra'], tsmap_fit['dec'],
                      tsmap_fit['glon'], tsmap_fit['glat'])

    # Fit baseline (point-source) model
    self.free_norm(name)
    fit_output = self._fit(loglevel=logging.DEBUG, **config['optimizer'])

    # Save likelihood value for baseline fit
    loglike0 = fit_output['loglike']
    self.logger.debug('Baseline Model Likelihood: %f', loglike0)

    self.zero_source(name)

    o = {'name': name,
         'config': config,
         'fit_success': True,
         'loglike_base': loglike0,
         'loglike_loc': np.nan,
         'dloglike_loc': np.nan}

    cdelt0 = np.abs(skywcs.wcs.cdelt[0])
    cdelt1 = np.abs(skywcs.wcs.cdelt[1])
    scan_step = 2.0 * tsmap_fit['r95'] / (nstep - 1.0)

    self.logger.debug('Refining localization search to '
                      'region of width: %.4f deg', tsmap_fit['r95'])

    scan_map = Map.create(SkyCoord(tsmap_fit['ra'], tsmap_fit['dec'],
                                   unit='deg'),
                          scan_step, (nstep, nstep),
                          coordsys=wcs_utils.get_coordsys(skywcs))

    scan_skydir = scan_map.get_pixel_skydirs()

    lnlscan = dict(wcs=scan_map.wcs.to_header().items(),
                   loglike=np.zeros((nstep, nstep)),
                   dloglike=np.zeros((nstep, nstep)),
                   dloglike_fit=np.zeros((nstep, nstep)))

    for i, t in enumerate(scan_skydir):
        model_name = '%s_localize' % (name.replace(' ', '').lower())
        src.set_name(model_name)
        src.set_position(t)
        self.add_source(model_name, src, free=True,
                        init_source=False, save_source_maps=False,
                        loglevel=logging.DEBUG)
        fit_output = self._fit(loglevel=logging.DEBUG,
                               **config['optimizer'])

        loglike1 = fit_output['loglike']
        lnlscan['loglike'].flat[i] = loglike1
        self.delete_source(model_name, loglevel=logging.DEBUG)

    lnlscan['dloglike'] = lnlscan['loglike'] - np.max(lnlscan['loglike'])
    scan_tsmap = Map(2.0 * lnlscan['dloglike'].T, scan_map.wcs)

    self.unzero_source(name)
    saved_state.restore()
    self._sync_params(name)
    self._update_roi()

    scan_fit, new_skydir = fit_error_ellipse(scan_tsmap, dpix=3)
    o.update(scan_fit)

    o['loglike_loc'] = np.max(lnlscan['loglike']) + \
        0.5 * scan_fit['offset']
    o['dloglike_loc'] = o['loglike_loc'] - o['loglike_base']

    # lnlscan['dloglike_fit'] = \
    #     utils.parabola(np.linspace(0, nstep - 1.0, nstep)[:, np.newaxis],
    #                    np.linspace(0, nstep - 1.0, nstep)[np.newaxis, :],
    #                    *scan_fit['popt']).reshape((nstep, nstep))

    o['lnlscan'] = lnlscan

    # Best fit position and uncertainty from fit to TS map
    o['tsmap_fit'] = tsmap_fit

    # Best fit position and uncertainty from pylike scan
    o['scan_fit'] = scan_fit
    pix = new_skydir.to_pixel(skywcs)
    o['xpix'] = float(pix[0])
    o['ypix'] = float(pix[1])
    o['deltax'] = (o['xpix'] - src_pix[0]) * cdelt0
    o['deltay'] = (o['ypix'] - src_pix[1]) * cdelt1

    o['offset'] = skydir.separation(new_skydir).deg

    if o['offset'] > dtheta_max:
        o['fit_success'] = False

    if not o['fit_success']:
        self.logger.error('Localization failed.\n'
                          '(ra,dec) = (%10.4f,%10.4f)\n'
                          '(glon,glat) = (%10.4f,%10.4f)\n'
                          'offset = %8.4f deltax = %8.4f '
                          'deltay = %8.4f',
                          o['ra'], o['dec'], o['glon'], o['glat'],
                          o['offset'], o['deltax'], o['deltay'])
    else:
        self.logger.info('Localization succeeded with '
                         'coordinates:\n'
                         '(ra,dec) = (%10.4f,%10.4f)\n'
                         '(glon,glat) = (%10.4f,%10.4f)\n'
                         'offset = %8.4f r68 = %8.4f',
                         o['ra'], o['dec'], o['glon'], o['glat'],
                         o['offset'], o['r68'])

    self.roi[name]['localize'] = copy.deepcopy(o)

    if config['make_plots']:
        self._plotter.make_localization_plots(o, tsmap, self.roi,
                                              prefix=prefix,
                                              skydir=scan_skydir)

    if update and o['fit_success']:
        self.logger.info('Updating source %s '
                         'to localized position.', name)
        src = self.delete_source(name)
        src.set_position(new_skydir)
        src.set_name(newname, names=src.names)

        self.add_source(newname, src, free=True)
        fit_output = self.fit(loglevel=logging.DEBUG)
        o['loglike_loc'] = fit_output['loglike']
        o['dloglike_loc'] = o['loglike_loc'] - o['loglike_base']
        src = self.roi.get_source_by_name(newname)
        self.roi[newname]['localize'] = copy.deepcopy(o)
        self.logger.info('LogLike: %12.3f DeltaLogLike: %12.3f',
                         o['loglike_loc'], o['dloglike_loc'])

    if o['fit_success']:
        src = self.roi.get_source_by_name(newname)
        src['pos_sigma'] = o['sigma']
        src['pos_sigma_semimajor'] = o['sigma_semimajor']
        src['pos_sigma_semiminor'] = o['sigma_semiminor']
        src['pos_r68'] = o['r68']
        src['pos_r95'] = o['r95']
        src['pos_r99'] = o['r99']
        src['pos_angle'] = np.degrees(o['theta'])

    self.logger.info('Finished localization.')
    return o

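# Sketch: after a successful localization with update=True, the positional
# uncertainties recorded above can be read back off the source object
# (assumes a configured GTAnalysis instance `gta`; source name hypothetical).

src = gta.roi.get_source_by_name('3FGL J2253.9+1609')
print(src['pos_r68'], src['pos_r95'], src['pos_angle'])
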
def find_sources(self, prefix='', **kwargs):
    """An iterative source-finding algorithm.

    Parameters
    ----------
    model : dict
        Dictionary defining the properties of the test source.  This
        is the model that will be used for generating TS maps.

    sqrt_ts_threshold : float
        Source threshold in sqrt(TS).  Only peaks with sqrt(TS)
        exceeding this threshold will be used as seeds for new
        sources.

    min_separation : float
        Minimum separation in degrees of sources detected in each
        iteration.  The source finder will look for the maximum peak
        in the TS map within a circular region of this radius.

    max_iter : int
        Maximum number of source finding iterations.  The source
        finder will continue adding sources until no additional peaks
        are found or the number of iterations exceeds this number.

    sources_per_iter : int
        Maximum number of sources that will be added in each
        iteration.  If the number of detected peaks in a given
        iteration is larger than this number, only the N peaks with
        the largest TS will be used as seeds for the current
        iteration.

    tsmap_fitter : str
        Set the method used internally for generating TS maps.
        Valid options:

        * tsmap
        * tscube

    tsmap : dict
        Keyword arguments dictionary for tsmap method.

    tscube : dict
        Keyword arguments dictionary for tscube method.

    Returns
    -------
    peaks : list
        List of peak objects.

    sources : list
        List of source objects.
    """
    self.logger.info('Starting.')

    schema = ConfigSchema(self.defaults['sourcefind'],
                          tsmap=self.defaults['tsmap'],
                          tscube=self.defaults['tscube'])
    schema.add_option('search_skydir', None, '', SkyCoord)
    schema.add_option('search_minmax_radius', [None, 1.0], '', list)

    config = utils.create_dict(self.config['sourcefind'],
                               tsmap=self.config['tsmap'],
                               tscube=self.config['tscube'])
    config = schema.create_config(config, **kwargs)

    # Defining default properties of test source model
    config['model'].setdefault('Index', 2.0)
    config['model'].setdefault('SpectrumType', 'PowerLaw')
    config['model'].setdefault('SpatialModel', 'PointSource')
    config['model'].setdefault('Prefactor', 1E-13)

    o = {'sources': [], 'peaks': []}

    for i in range(config['max_iter']):
        srcs, peaks = self._find_sources_iterate(prefix, i, **config)
        self.logger.info('Found %i sources in iteration %i.' %
                         (len(srcs), i))
        o['sources'] += srcs
        o['peaks'] += peaks
        if len(srcs) == 0:
            break

    self.logger.info('Done.')
    return o

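# Sketch: per-method settings for the internal TS map generation can be
# forwarded through the `tsmap`/`tscube` keyword dictionaries documented
# above (assumes a configured GTAnalysis instance `gta`; the values and the
# structure of the forwarded dict are illustrative).

found = gta.find_sources(max_iter=3, sources_per_iter=4,
                         tsmap={'model': {'Index': 2.2}})
for peak in found['peaks']:
    print(peak)
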