Example #1
File: residmap.py Project: tuoyl/fermipy
    def residmap(self, prefix='', **kwargs):
        """Generate 2-D spatial residual maps using the current ROI
        model and the convolution kernel defined with the `model`
        argument.

        Parameters
        ----------
        prefix : str
            String that will be prefixed to the output residual map files.

        {options}

        Returns
        -------
        maps : dict
           A dictionary containing the `~fermipy.utils.Map` objects
           for the residual significance and amplitude.    

        """
        timer = Timer.create(start=True)
        self.logger.info('Generating residual maps')

        schema = ConfigSchema(self.defaults['residmap'])

        config = schema.create_config(self.config['residmap'], **kwargs)

        # Defining default properties of test source model
        config['model'].setdefault('Index', 2.0)
        config['model'].setdefault('SpectrumType', 'PowerLaw')
        config['model'].setdefault('SpatialModel', 'PointSource')
        config['model'].setdefault('Prefactor', 1E-13)

        o = self._make_residual_map(prefix, **config)

        if config['make_plots']:
            plotter = plotting.AnalysisPlotter(self.config['plotting'],
                                               fileio=self.config['fileio'],
                                               logging=self.config['logging'])

            plotter.make_residmap_plots(o, self.roi)

        self.logger.info('Finished residual maps')

        outfile = utils.format_filename(self.workdir,
                                        'residmap',
                                        prefix=[o['name']])

        if config['write_fits']:
            o['file'] = os.path.basename(outfile) + '.fits'
            self._make_residmap_fits(o, outfile + '.fits')

        if config['write_npy']:
            np.save(outfile + '.npy', o)

        self.logger.info('Execution time: %.2f s', timer.elapsed_time)
        return o
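
A minimal usage sketch for the method above (not taken from the fermipy sources; the configuration file name and prefix are hypothetical, and only keyword options that appear in the docstring and code are used):

    from fermipy.gtanalysis import GTAnalysis

    # Assumes an analysis prepared from a (hypothetical) config.yaml.
    gta = GTAnalysis('config.yaml')
    gta.setup()

    # Residual map with a PowerLaw point-source kernel; omitted model keys
    # fall back to the setdefault() values in the method body above.
    resid = gta.residmap('fit0',
                         model={'SpatialModel': 'PointSource', 'Index': 2.0},
                         make_plots=True, write_fits=True)
    print(resid['name'], resid['file'])
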
Example #2
    def residmap(self, prefix='', **kwargs):
        """Generate 2-D spatial residual maps using the current ROI
        model and the convolution kernel defined with the `model`
        argument.

        Parameters
        ----------
        prefix : str
            String that will be prefixed to the output residual map files.

        {options}

        Returns
        -------
        maps : dict
           A dictionary containing the `~fermipy.utils.Map` objects
           for the residual significance and amplitude.    

        """
        timer = Timer.create(start=True)
        self.logger.info('Generating residual maps')

        schema = ConfigSchema(self.defaults['residmap'])

        config = schema.create_config(self.config['residmap'], **kwargs)

        # Defining default properties of test source model
        config['model'].setdefault('Index', 2.0)
        config['model'].setdefault('SpectrumType', 'PowerLaw')
        config['model'].setdefault('SpatialModel', 'PointSource')
        config['model'].setdefault('Prefactor', 1E-13)

        o = self._make_residual_map(prefix, **config)

        if config['make_plots']:
            plotter = plotting.AnalysisPlotter(self.config['plotting'],
                                               fileio=self.config['fileio'],
                                               logging=self.config['logging'])

            plotter.make_residmap_plots(o, self.roi)

        self.logger.info('Finished residual maps')

        outfile = utils.format_filename(self.workdir, 'residmap',
                                        prefix=[o['name']])

        if config['write_fits']:
            o['file'] = os.path.basename(outfile) + '.fits'
            self._make_residmap_fits(o, outfile + '.fits')

        if config['write_npy']:
            np.save(outfile + '.npy', o)

        self.logger.info('Execution time: %.2f s', timer.elapsed_time)
        return o
Example #3
File: lightcurve.py Project: tuoyl/fermipy
    def lightcurve(self, name, **kwargs):
        """Generate a lightcurve for the named source. The function will
        complete the basic analysis steps for each bin and perform a
        likelihood fit for each bin. Extracted values (along with
        errors) are integral flux, spectral model, spectral index, TS
        value, and predicted number of photons. Note: successful
        calculation of TS\ :subscript:`var` requires at least one free
        background parameter and a previously optimized ROI model.

        Parameters
        ----------
        name : str
            Source name.

        {options}

        Returns
        -------
        LightCurve : dict
           Dictionary containing the output of the LC analysis.

        """

        name = self.roi.get_source_by_name(name).name

        # Create schema for method configuration
        schema = ConfigSchema(self.defaults['lightcurve'],
                              optimizer=self.defaults['optimizer'])
        schema.add_option('prefix', '')
        config = utils.create_dict(self.config['lightcurve'],
                                   optimizer=self.config['optimizer'])
        config = schema.create_config(config, **kwargs)

        self.logger.info('Computing Lightcurve for %s' % name)

        o = self._make_lc(name, **config)
        filename = utils.format_filename(
            self.workdir,
            'lightcurve',
            prefix=[config['prefix'],
                    name.lower().replace(' ', '_')])

        o['file'] = None
        if config['write_fits']:
            o['file'] = os.path.basename(filename) + '.fits'
            self._make_lc_fits(o, filename + '.fits', **config)

        if config['write_npy']:
            np.save(filename + '.npy', o)

        self.logger.info('Finished Lightcurve')

        return o
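
A hedged usage sketch (the source name and configuration file are illustrative; as the docstring notes, TS_var needs a previously optimized ROI model):

    from fermipy.gtanalysis import GTAnalysis

    gta = GTAnalysis('config.yaml')      # hypothetical configuration
    gta.setup()
    gta.optimize()                       # optimize the ROI model first

    lc = gta.lightcurve('3FGL J2253.9+1609', write_fits=True, write_npy=True)
    print(lc['file'])                    # basename of the FITS output, or None
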
Example #4
    def lightcurve(self, name, **kwargs):
        """Generate a lightcurve for the named source. The function will
        complete the basic analysis steps for each bin and perform a
        likelihood fit for each bin. Extracted values (along with
        errors) are integral flux, spectral model, spectral index, TS
        value, and predicted number of photons. Note: successful
        calculation of TS\ :subscript:`var` requires at least one free
        background parameter and a previously optimized ROI model.

        Parameters
        ----------
        name : str
            Source name.

        {options}

        Returns
        -------
        LightCurve : dict
           Dictionary containing the output of the LC analysis.

        """

        name = self.roi.get_source_by_name(name).name

        # Create schema for method configuration
        schema = ConfigSchema(self.defaults['lightcurve'],
                              optimizer=self.defaults['optimizer'])
        schema.add_option('prefix', '')
        config = utils.create_dict(self.config['lightcurve'],
                                   optimizer=self.config['optimizer'])
        config = schema.create_config(config, **kwargs)

        self.logger.info('Computing Lightcurve for %s' % name)

        o = self._make_lc(name, **config)
        filename = utils.format_filename(self.workdir, 'lightcurve',
                                         prefix=[config['prefix'],
                                                 name.lower().replace(' ', '_')])

        o['file'] = None
        if config['write_fits']:
            o['file'] = os.path.basename(filename) + '.fits'
            self._make_lc_fits(o, filename + '.fits', **config)

        if config['write_npy']:
            np.save(filename + '.npy', o)

        self.logger.info('Finished Lightcurve')

        return o
Example #5
    def tsmap(self, prefix='', **kwargs):
        """Generate a spatial TS map for a source component with
        properties defined by the `model` argument.  The TS map will
        have the same geometry as the ROI.  The output of this method
        is a dictionary containing `~fermipy.skymap.Map` objects with
        the TS and amplitude of the best-fit test source.  By default
        this method will also save maps to FITS files and render them
        as image files.

        This method uses a simplified likelihood fitting
        implementation that only fits for the normalization of the
        test source.  Before running this method it is recommended to
        first optimize the ROI model (e.g. by running
        :py:meth:`~fermipy.gtanalysis.GTAnalysis.optimize`).

        Parameters
        ----------
        prefix : str
           Optional string that will be prepended to all output files.

        {options}

        Returns
        -------
        tsmap : dict
           A dictionary containing the `~fermipy.skymap.Map` objects
           for TS and source amplitude.

        """
        timer = Timer.create(start=True)

        schema = ConfigSchema(self.defaults['tsmap'])
        schema.add_option('loglevel', logging.INFO)
        schema.add_option('map_skydir', None, '', astropy.coordinates.SkyCoord)
        schema.add_option('map_size', 1.0)
        schema.add_option('threshold', 1E-2, '', float)
        schema.add_option('use_pylike', True, '', bool)
        schema.add_option('outfile', None, '', str)
        config = schema.create_config(self.config['tsmap'], **kwargs)

        # Defining default properties of test source model
        config['model'].setdefault('Index', 2.0)
        config['model'].setdefault('SpectrumType', 'PowerLaw')
        config['model'].setdefault('SpatialModel', 'PointSource')

        self.logger.log(config['loglevel'], 'Generating TS map')

        o = self._make_tsmap_fast(prefix, **config)

        if config['make_plots']:
            plotter = plotting.AnalysisPlotter(self.config['plotting'],
                                               fileio=self.config['fileio'],
                                               logging=self.config['logging'])

            plotter.make_tsmap_plots(o, self.roi)

        self.logger.log(config['loglevel'], 'Finished TS map')

        outfile = config.get('outfile', None)
        if outfile is None:
            outfile = utils.format_filename(self.workdir,
                                            'tsmap',
                                            prefix=[o['name']])
        else:
            outfile = os.path.join(self.workdir, os.path.splitext(outfile)[0])

        if config['write_fits']:
            o['file'] = os.path.basename(outfile) + '.fits'
            self._make_tsmap_fits(o, outfile + '.fits')

        if config['write_npy']:
            np.save(outfile + '.npy', o)

        self.logger.log(config['loglevel'], 'Execution time: %.2f s',
                        timer.elapsed_time)
        return o
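
A usage sketch under the same assumptions (hypothetical configuration file; the docstring recommends optimizing the ROI model first):

    from fermipy.gtanalysis import GTAnalysis

    gta = GTAnalysis('config.yaml')      # hypothetical configuration
    gta.setup()
    gta.optimize()                       # recommended before computing TS maps

    ts = gta.tsmap('fit1',
                   model={'SpatialModel': 'PointSource', 'Index': 2.0},
                   make_plots=True, write_fits=True)
    print(ts['name'], ts['file'])
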
Example #6
    def find_sources(self, prefix='', **kwargs):
        """An iterative source-finding algorithm that uses likelihood
        ratio (TS) maps of the region of interest to find new sources.
        After each iteration a new TS map is generated incorporating
        sources found in the previous iteration.  The method stops
        when the number of iterations exceeds ``max_iter`` or no
        sources exceeding ``sqrt_ts_threshold`` are found.

        Parameters
        ----------
        {options}

        tsmap : dict
           Keyword arguments dictionary for tsmap method.

        tscube : dict
           Keyword arguments dictionary for tscube method.

        Returns
        -------

        peaks : list
           List of peak objects.

        sources : list
           List of source objects.

        """

        self.logger.info('Starting.')

        schema = ConfigSchema(self.defaults['sourcefind'],
                              tsmap=self.defaults['tsmap'],
                              tscube=self.defaults['tscube'])

        schema.add_option('search_skydir', None, '', SkyCoord)
        schema.add_option('search_minmax_radius', [None, 1.0], '', list)

        config = utils.create_dict(self.config['sourcefind'],
                                   tsmap=self.config['tsmap'],
                                   tscube=self.config['tscube'])
        config = schema.create_config(config, **kwargs)

        # Defining default properties of test source model
        config['model'].setdefault('Index', 2.0)
        config['model'].setdefault('SpectrumType', 'PowerLaw')
        config['model'].setdefault('SpatialModel', 'PointSource')
        config['model'].setdefault('Prefactor', 1E-13)

        o = {'sources': [], 'peaks': []}

        for i in range(config['max_iter']):
            srcs, peaks = self._find_sources_iterate(prefix, i, **config)

            self.logger.info('Found %i sources in iteration %i.' %
                             (len(srcs), i))

            o['sources'] += srcs
            o['peaks'] += peaks
            if len(srcs) == 0:
                break

        self.logger.info('Done.')

        return o
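
A usage sketch (the threshold values are illustrative; sqrt_ts_threshold and min_separation are documented in the fuller docstring of Example #19 below):

    from fermipy.gtanalysis import GTAnalysis

    gta = GTAnalysis('config.yaml')      # hypothetical configuration
    gta.setup()
    gta.optimize()

    found = gta.find_sources('iter0',
                             model={'SpatialModel': 'PointSource', 'Index': 2.0},
                             sqrt_ts_threshold=5.0, min_separation=0.5)
    print('%i new sources, %i peaks' % (len(found['sources']), len(found['peaks'])))
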
Example #7
    def localize(self, name, **kwargs):
        """Find the best-fit position of a source.  Localization is
        performed in two steps.  First a TS map is computed centered
        on the source with half-width set by ``dtheta_max``.  A fit is
        then performed to the maximum TS peak in this map.  The source
        position is then further refined by scanning the likelihood in
        the vicinity of the peak found in the first step.  The size of
        the scan region is set to encompass the 99% positional
        uncertainty contour as determined from the peak fit.

        Parameters
        ----------
        name : str
            Source name.

        {options}

        optimizer : dict
            Dictionary that overrides the default optimizer settings.

        Returns
        -------
        localize : dict
            Dictionary containing results of the localization
            analysis.

        """
        name = self.roi.get_source_by_name(name).name

        schema = ConfigSchema(self.defaults['localize'],
                              optimizer=self.defaults['optimizer'])
        schema.add_option('use_cache', True)
        schema.add_option('prefix', '')
        config = utils.create_dict(self.config['localize'],
                                   optimizer=self.config['optimizer'])
        config = schema.create_config(config, **kwargs)

        self.logger.info('Running localization for %s' % name)

        free_state = FreeParameterState(self)
        loc = self._localize(name, **config)
        free_state.restore()

        self.logger.info('Finished localization.')

        if config['make_plots']:
            self._plotter.make_localization_plots(loc, self.roi,
                                                  prefix=config['prefix'])

        outfile = \
            utils.format_filename(self.workdir, 'loc',
                                  prefix=[config['prefix'],
                                          name.lower().replace(' ', '_')])

        if config['write_fits']:
            loc['file'] = os.path.basename(outfile) + '.fits'
            self._make_localize_fits(loc, outfile + '.fits',
                                     **config)

        if config['write_npy']:
            np.save(outfile + '.npy', dict(loc))

        return loc
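
A usage sketch (the source name is illustrative; the update option and the result keys read here appear in the fuller localize implementation of Example #9):

    from fermipy.gtanalysis import GTAnalysis

    gta = GTAnalysis('config.yaml')      # hypothetical configuration
    gta.setup()

    loc = gta.localize('3FGL J2253.9+1609', update=True, make_plots=True)
    if loc['fit_success']:
        print(loc['ra'], loc['dec'], loc['r95'])
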
Example #8
    def extension(self, name, **kwargs):
        """Test this source for spatial extension with the likelihood
        ratio method (TS_ext).  This method will substitute an
        extended spatial model for the given source and perform a
        one-dimensional scan of the spatial extension parameter over
        the range specified with the width parameters.  The 1-D
        profile likelihood is then used to compute the best-fit value,
        upper limit, and TS for extension.  The nuisance parameters
        that will be simultaneously fit when performing the spatial
        scan can be controlled with the ``fix_shape``,
        ``free_background``, and ``free_radius`` options.  By default
        the position of the source will be fixed to its current
        position.  A simultaneous fit to position and extension can be
        performed by setting ``fit_position`` to True.

        Parameters
        ----------
        name : str
            Source name.

        {options}

        optimizer : dict
            Dictionary that overrides the default optimizer settings.

        Returns
        -------
        extension : dict
            Dictionary containing results of the extension analysis.  The same
            dictionary is also saved to the dictionary of this source under
            'extension'.
        """
        timer = Timer.create(start=True)
        name = self.roi.get_source_by_name(name).name

        schema = ConfigSchema(self.defaults['extension'],
                              optimizer=self.defaults['optimizer'])
        schema.add_option('prefix', '')
        schema.add_option('outfile', None, '', str)
        config = utils.create_dict(self.config['extension'],
                                   optimizer=self.config['optimizer'])
        config = schema.create_config(config, **kwargs)

        self.logger.info('Running extension fit for %s', name)

        free_state = FreeParameterState(self)
        ext = self._extension(name, **config)
        free_state.restore()

        self.logger.info('Finished extension fit.')

        if config['make_plots']:
            self._plotter.make_extension_plots(ext,
                                               self.roi,
                                               prefix=config['prefix'])

        outfile = config.get('outfile', None)
        if outfile is None:
            outfile = utils.format_filename(
                self.workdir,
                'ext',
                prefix=[config['prefix'],
                        name.lower().replace(' ', '_')])
        else:
            outfile = os.path.join(self.workdir, os.path.splitext(outfile)[0])

        if config['write_fits']:
            self._make_extension_fits(ext, outfile + '.fits')

        if config['write_npy']:
            o_copy = dict(ext)
            self.logger.warning(
                'Saving maps in .npy files is disabled because of '
                'incompatibilities in python3; the maps are removed '
                'from %s.npy' % outfile)
            for xrm in ['tsmap']:
                o_copy.pop(xrm)
            np.save(outfile + '.npy', o_copy)

        self.logger.info('Execution time: %.2f s', timer.elapsed_time)
        return ext
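
A usage sketch (the source name and option values are illustrative; fix_shape, free_background, free_radius, and fit_position are the options named in the docstring):

    from fermipy.gtanalysis import GTAnalysis

    gta = GTAnalysis('config.yaml')      # hypothetical configuration
    gta.setup()

    ext = gta.extension('3FGL J2253.9+1609',
                        free_background=True, fit_position=False,
                        write_fits=True, write_npy=True)
    print(sorted(ext.keys()))            # the returned dict keeps the 'tsmap' entry
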
Example #9
File: sourcefind.py Project: NAH8/fermipy
    def localize(self, name, **kwargs):
        """Find the best-fit position of a source.  Localization is
        performed in two steps.  First a TS map is computed centered
        on the source with half-width set by ``dtheta_max``.  A fit is
        then performed to the maximum TS peak in this map.  The source
        position is then further refined by scanning the likelihood in
        the vicinity of the peak found in the first step.  The size of
        the scan region is set to encompass the 99% positional
        uncertainty contour as determined from the peak fit.

        Parameters
        ----------
        name : str
            Source name.

        dtheta_max : float
            Maximum offset in RA/DEC in deg from the nominal source
            position that will be used to define the boundaries of the
            TS map search region.

        nstep : int
            Number of steps in longitude/latitude that will be taken
            when refining the source position.  The bounds of the scan
            range are set to the 99% positional uncertainty as
            determined from the TS map peak fit.  The total number of
            sampling points will be nstep**2.

        fix_background : bool
            Fix background parameters when fitting the source position.

        update : bool
            Update the model for this source with the best-fit
            position.  If newname=None this will overwrite the
            existing source map of this source with one corresponding
            to its new location.

        newname : str
            Name that will be assigned to the relocalized source
            when update=True.  If newname is None then the existing
            source name will be used.

        make_plots : bool
           Generate plots.

        write_fits : bool
           Write the output to a FITS file.

        write_npy : bool
           Write the output dictionary to a numpy file.

        optimizer : dict
            Dictionary that overrides the default optimizer settings.

        Returns
        -------
        localize : dict
            Dictionary containing results of the localization
            analysis.  This dictionary is also saved to the
            dictionary of this source in 'localize'.

        """

        name = self.roi.get_source_by_name(name).name

        schema = ConfigSchema(self.defaults['localize'],
                              optimizer=self.defaults['optimizer'])
        schema.add_option('make_plots', False)
        schema.add_option('write_fits', True)
        schema.add_option('write_npy', True)
        schema.add_option('newname', name)
        schema.add_option('prefix', '')
        config = utils.create_dict(self.config['localize'],
                                   optimizer=self.config['optimizer'])
        config = schema.create_config(config, **kwargs)

        nstep = config['nstep']
        dtheta_max = config['dtheta_max']
        update = config['update']
        newname = config['newname']
        prefix = config['prefix']

        self.logger.info('Running localization for %s' % name)

        saved_state = LikelihoodState(self.like)

        if config['fix_background']:
            self.free_sources(free=False, loglevel=logging.DEBUG)

        src = self.roi.copy_source(name)
        skydir = src.skydir
        skywcs = self._skywcs
        src_pix = skydir.to_pixel(skywcs)

        tsmap_fit, tsmap = self._localize_tsmap(name,
                                                prefix=prefix,
                                                dtheta_max=dtheta_max)

        self.logger.debug(
            'Completed localization with TS Map.\n'
            '(ra,dec) = (%10.4f,%10.4f)\n'
            '(glon,glat) = (%10.4f,%10.4f)', tsmap_fit['ra'], tsmap_fit['dec'],
            tsmap_fit['glon'], tsmap_fit['glat'])

        # Fit baseline (point-source) model
        self.free_norm(name)
        fit_output = self._fit(loglevel=logging.DEBUG, **config['optimizer'])

        # Save likelihood value for baseline fit
        loglike0 = fit_output['loglike']
        self.logger.debug('Baseline Model Likelihood: %f', loglike0)

        self.zero_source(name)

        o = {
            'name': name,
            'config': config,
            'fit_success': True,
            'loglike_base': loglike0,
            'loglike_loc': np.nan,
            'dloglike_loc': np.nan
        }

        cdelt0 = np.abs(skywcs.wcs.cdelt[0])
        cdelt1 = np.abs(skywcs.wcs.cdelt[1])
        scan_step = 2.0 * tsmap_fit['r95'] / (nstep - 1.0)

        self.logger.debug(
            'Refining localization search to '
            'region of width: %.4f deg', tsmap_fit['r95'])

        scan_map = Map.create(SkyCoord(tsmap_fit['ra'],
                                       tsmap_fit['dec'],
                                       unit='deg'),
                              scan_step, (nstep, nstep),
                              coordsys=wcs_utils.get_coordsys(skywcs))

        scan_skydir = scan_map.get_pixel_skydirs()

        lnlscan = dict(wcs=scan_map.wcs.to_header().items(),
                       loglike=np.zeros((nstep, nstep)),
                       dloglike=np.zeros((nstep, nstep)),
                       dloglike_fit=np.zeros((nstep, nstep)))

        for i, t in enumerate(scan_skydir):
            model_name = '%s_localize' % (name.replace(' ', '').lower())
            src.set_name(model_name)
            src.set_position(t)
            self.add_source(model_name,
                            src,
                            free=True,
                            init_source=False,
                            save_source_maps=False,
                            loglevel=logging.DEBUG)
            fit_output = self._fit(loglevel=logging.DEBUG,
                                   **config['optimizer'])

            loglike1 = fit_output['loglike']
            lnlscan['loglike'].flat[i] = loglike1
            self.delete_source(model_name, loglevel=logging.DEBUG)

        lnlscan['dloglike'] = lnlscan['loglike'] - np.max(lnlscan['loglike'])
        scan_tsmap = Map(2.0 * lnlscan['dloglike'].T, scan_map.wcs)

        self.unzero_source(name)
        saved_state.restore()
        self._sync_params(name)
        self._update_roi()

        scan_fit, new_skydir = fit_error_ellipse(scan_tsmap, dpix=3)
        o.update(scan_fit)

        o['loglike_loc'] = np.max(
            lnlscan['loglike']) + 0.5 * scan_fit['offset']
        o['dloglike_loc'] = o['loglike_loc'] - o['loglike_base']

        # lnlscan['dloglike_fit'] = \
        #   utils.parabola(np.linspace(0,nstep-1.0,nstep)[:,np.newaxis],
        #                  np.linspace(0,nstep-1.0,nstep)[np.newaxis,:],
        #                  *scan_fit['popt']).reshape((nstep,nstep))

        o['lnlscan'] = lnlscan

        # Best fit position and uncertainty from fit to TS map
        o['tsmap_fit'] = tsmap_fit

        # Best fit position and uncertainty from pylike scan
        o['scan_fit'] = scan_fit
        pix = new_skydir.to_pixel(skywcs)
        o['xpix'] = float(pix[0])
        o['ypix'] = float(pix[1])
        o['deltax'] = (o['xpix'] - src_pix[0]) * cdelt0
        o['deltay'] = (o['ypix'] - src_pix[1]) * cdelt1

        o['offset'] = skydir.separation(new_skydir).deg

        if o['offset'] > dtheta_max:
            o['fit_success'] = False

        if not o['fit_success']:
            self.logger.error(
                'Localization failed.\n'
                '(ra,dec) = (%10.4f,%10.4f)\n'
                '(glon,glat) = (%10.4f,%10.4f)\n'
                'offset = %8.4f deltax = %8.4f '
                'deltay = %8.4f', o['ra'], o['dec'], o['glon'], o['glat'],
                o['offset'], o['deltax'], o['deltay'])
        else:
            self.logger.info(
                'Localization succeeded with '
                'coordinates:\n'
                '(ra,dec) = (%10.4f,%10.4f)\n'
                '(glon,glat) = (%10.4f,%10.4f)\n'
                'offset = %8.4f r68 = %8.4f', o['ra'], o['dec'], o['glon'],
                o['glat'], o['offset'], o['r68'])

        self.roi[name]['localize'] = copy.deepcopy(o)

        if config['make_plots']:
            self._plotter.make_localization_plots(o,
                                                  tsmap,
                                                  self.roi,
                                                  prefix=prefix,
                                                  skydir=scan_skydir)

        if update and o['fit_success']:
            self.logger.info('Updating source %s '
                             'to localized position.', name)
            src = self.delete_source(name)
            src.set_position(new_skydir)
            src.set_name(newname, names=src.names)

            self.add_source(newname, src, free=True)
            fit_output = self.fit(loglevel=logging.DEBUG)
            o['loglike_loc'] = fit_output['loglike']
            o['dloglike_loc'] = o['loglike_loc'] - o['loglike_base']
            src = self.roi.get_source_by_name(newname)
            self.roi[newname]['localize'] = copy.deepcopy(o)
            self.logger.info('LogLike: %12.3f DeltaLogLike: %12.3f',
                             o['loglike_loc'], o['dloglike_loc'])

        if o['fit_success']:
            src = self.roi.get_source_by_name(newname)
            src['pos_sigma'] = o['sigma']
            src['pos_sigma_semimajor'] = o['sigma_semimajor']
            src['pos_sigma_semiminor'] = o['sigma_semiminor']
            src['pos_r68'] = o['r68']
            src['pos_r95'] = o['r95']
            src['pos_r99'] = o['r99']
            src['pos_angle'] = np.degrees(o['theta'])

        self.logger.info('Finished localization.')
        return o
Example #10
    def localize(self, name, **kwargs):
        """Find the best-fit position of a source.  Localization is
        performed in two steps.  First a TS map is computed centered
        on the source with half-width set by ``dtheta_max``.  A fit is
        then performed to the maximum TS peak in this map.  The source
        position is then further refined by scanning the likelihood in
        the vicinity of the peak found in the first step.  The size of
        the scan region is set to encompass the 99% positional
        uncertainty contour as determined from the peak fit.

        Parameters
        ----------
        name : str
            Source name.

        {options}

        optimizer : dict
            Dictionary that overrides the default optimizer settings.

        Returns
        -------
        localize : dict
            Dictionary containing results of the localization
            analysis.

        """
        timer = Timer.create(start=True)
        name = self.roi.get_source_by_name(name).name

        schema = ConfigSchema(self.defaults['localize'],
                              optimizer=self.defaults['optimizer'])
        schema.add_option('use_cache', True)
        schema.add_option('prefix', '')
        config = utils.create_dict(self.config['localize'],
                                   optimizer=self.config['optimizer'])
        config = schema.create_config(config, **kwargs)

        self.logger.info('Running localization for %s' % name)

        free_state = FreeParameterState(self)
        loc = self._localize(name, **config)
        free_state.restore()

        self.logger.info('Finished localization.')

        if config['make_plots']:
            self._plotter.make_localization_plots(loc, self.roi,
                                                  prefix=config['prefix'])

        outfile = \
            utils.format_filename(self.workdir, 'loc',
                                  prefix=[config['prefix'],
                                          name.lower().replace(' ', '_')])

        if config['write_fits']:
            loc['file'] = os.path.basename(outfile) + '.fits'
            self._make_localize_fits(loc, outfile + '.fits',
                                     **config)

        if config['write_npy']:
            np.save(outfile + '.npy', dict(loc))

        self.logger.info('Execution time: %.2f s', timer.elapsed_time)
        return loc
Example #11
File: tsmap.py Project: jefemagril/fermipy
    def tsmap(self, prefix='', **kwargs):
        """Generate a spatial TS map for a source component with
        properties defined by the `model` argument.  The TS map will
        have the same geometry as the ROI.  The output of this method
        is a dictionary containing `~fermipy.skymap.Map` objects with
        the TS and amplitude of the best-fit test source.  By default
        this method will also save maps to FITS files and render them
        as image files.

        This method uses a simplified likelihood fitting
        implementation that only fits for the normalization of the
        test source.  Before running this method it is recommended to
        first optimize the ROI model (e.g. by running
        :py:meth:`~fermipy.gtanalysis.GTAnalysis.optimize`).

        Parameters
        ----------
        prefix : str
           Optional string that will be prepended to all output files.

        {options}

        Returns
        -------
        tsmap : dict
           A dictionary containing the `~fermipy.skymap.Map` objects
           for TS and source amplitude.

        """
        timer = Timer.create(start=True)

        schema = ConfigSchema(self.defaults['tsmap'])
        schema.add_option('loglevel', logging.INFO)
        schema.add_option('map_skydir', None, '', astropy.coordinates.SkyCoord)
        schema.add_option('map_size', 1.0)
        schema.add_option('threshold', 1E-2, '', float)
        schema.add_option('use_pylike', True, '', bool)
        schema.add_option('outfile', None, '', str)
        config = schema.create_config(self.config['tsmap'], **kwargs)

        # Defining default properties of test source model
        config['model'].setdefault('Index', 2.0)
        config['model'].setdefault('SpectrumType', 'PowerLaw')
        config['model'].setdefault('SpatialModel', 'PointSource')

        self.logger.log(config['loglevel'], 'Generating TS map')

        o = self._make_tsmap_fast(prefix, **config)

        if config['make_plots']:
            plotter = plotting.AnalysisPlotter(self.config['plotting'],
                                               fileio=self.config['fileio'],
                                               logging=self.config['logging'])

            plotter.make_tsmap_plots(o, self.roi)

        self.logger.log(config['loglevel'], 'Finished TS map')

        outfile = config.get('outfile', None)
        if outfile is None:
            outfile = utils.format_filename(self.workdir, 'tsmap',
                                            prefix=[o['name']])
        else:
            outfile = os.path.join(self.workdir,
                                   os.path.splitext(outfile)[0])

        if config['write_fits']:
            o['file'] = os.path.basename(outfile) + '.fits'
            self._make_tsmap_fits(o, outfile + '.fits')

        if config['write_npy']:
            np.save(outfile + '.npy', o)

        self.logger.log(config['loglevel'],
                        'Execution time: %.2f s', timer.elapsed_time)
        return o
Example #12
    def extension(self, name, **kwargs):
        """Test this source for spatial extension with the likelihood
        ratio method (TS_ext).  This method will substitute an
        extended spatial model for the given source and perform a
        one-dimensional scan of the spatial extension parameter over
        the range specified with the width parameters.  The 1-D
        profile likelihood is then used to compute the best-fit value,
        upper limit, and TS for extension.  The nuisance parameters
        that will be simultaneously fit when performing the spatial
        scan can be controlled with the ``fix_shape``,
        ``free_background``, and ``free_radius`` options.  By default
        the position of the source will be fixed to its current
        position.  A simultaneous fit to position and extension can be
        performed by setting ``fit_position`` to True.

        Parameters
        ----------
        name : str
            Source name.

        {options}

        optimizer : dict
            Dictionary that overrides the default optimizer settings.

        Returns
        -------
        extension : dict
            Dictionary containing results of the extension analysis.  The same
            dictionary is also saved to the dictionary of this source under
            'extension'.
        """
        timer = Timer.create(start=True)
        name = self.roi.get_source_by_name(name).name

        schema = ConfigSchema(self.defaults['extension'],
                              optimizer=self.defaults['optimizer'])
        schema.add_option('prefix', '')
        schema.add_option('outfile', None, '', str)
        config = utils.create_dict(self.config['extension'],
                                   optimizer=self.config['optimizer'])
        config = schema.create_config(config, **kwargs)

        self.logger.info('Running extension fit for %s', name)

        free_state = FreeParameterState(self)
        ext = self._extension(name, **config)
        free_state.restore()

        self.logger.info('Finished extension fit.')

        if config['make_plots']:
            self._plotter.make_extension_plots(ext, self.roi,
                                               prefix=config['prefix'])

        outfile = config.get('outfile', None)
        if outfile is None:
            outfile = utils.format_filename(self.workdir, 'ext',
                                            prefix=[config['prefix'],
                                                    name.lower().replace(' ', '_')])
        else:
            outfile = os.path.join(self.workdir,
                                   os.path.splitext(outfile)[0])

        if config['write_fits']:
            self._make_extension_fits(ext, outfile + '.fits')

        if config['write_npy']:
            np.save(outfile + '.npy', dict(ext))

        self.logger.info('Execution time: %.2f s', timer.elapsed_time)
        return ext
Example #13
    def tsmap(self, prefix='', **kwargs):
        """Generate a spatial TS map for a source component with
        properties defined by the `model` argument.  The TS map will
        have the same geometry as the ROI.  The output of this method
        is a dictionary containing `~fermipy.skymap.Map` objects with
        the TS and amplitude of the best-fit test source.  By default
        this method will also save maps to FITS files and render them
        as image files.

        This method uses a simplified likelihood fitting
        implementation that only fits for the normalization of the
        test source.  Before running this method it is recommended to
        first optimize the ROI model (e.g. by running
        :py:meth:`~fermipy.gtanalysis.GTAnalysis.optimize`).

        Parameters
        ----------
        prefix : str
           Optional string that will be prepended to all output files
           (FITS and rendered images).

        model : dict
           Dictionary defining the properties of the test source.

        exclude : list
            Source or sources that will be removed from the model when
            computing the TS map.

        loge_bounds : list
           Restrict the analysis to an energy range (emin,emax) in
           log10(E/MeV) that is a subset of the analysis energy range.
           By default the full analysis energy range will be used.  If
           either emin or emax is None then only an upper/lower bound on
           the energy range will be applied.

        max_kernel_radius : float
           Set the maximum radius of the test source kernel.  Using a
           smaller value will speed up the TS calculation at the loss of
           accuracy.  The default value is 3 degrees.

        make_plots : bool
           Generate plots.

        write_fits : bool
           Write the output to a FITS file.

        write_npy : bool
           Write the output dictionary to a numpy file.

        Returns
        -------
        maps : dict
           A dictionary containing the `~fermipy.skymap.Map` objects
           for TS and source amplitude.

        """

        self.logger.info('Generating TS map')

        schema = ConfigSchema(self.defaults['tsmap'])
        schema.add_option('make_plots', False)
        schema.add_option('write_fits', True)
        schema.add_option('write_npy', True)
        schema.add_option('map_skydir', None, '', astropy.coordinates.SkyCoord)
        schema.add_option('map_size', 1.0)
        schema.add_option('exclude', None, '', list)
        schema.add_option('threshold', 1E-2, '', float)
        config = schema.create_config(self.config['tsmap'], **kwargs)

        # Defining default properties of test source model
        config['model'].setdefault('Index', 2.0)
        config['model'].setdefault('SpectrumType', 'PowerLaw')
        config['model'].setdefault('SpatialModel', 'PointSource')
        config['model'].setdefault('Prefactor', 1E-13)

        maps = self._make_tsmap_fast(prefix, **config)

        if config['make_plots']:
            plotter = plotting.AnalysisPlotter(self.config['plotting'],
                                               fileio=self.config['fileio'],
                                               logging=self.config['logging'])

            plotter.make_tsmap_plots(maps, self.roi)

        self.logger.info('Finished TS map')
        return maps
Example #14
File: sed.py Project: NAH8/fermipy
    def sed(self, name, **kwargs):
        """Generate a spectral energy distribution (SED) for a source.  This
        function will fit the normalization of the source in each
        energy bin.  By default the SED will be generated with the
        analysis energy bins but a custom binning can be defined with
        the ``loge_bins`` parameter.

        Parameters
        ----------
        name : str
            Source name.

        prefix : str
            Optional string that will be prepended to all output files
            (FITS and rendered images).

        loge_bins : `~numpy.ndarray`
            Sequence of energies in log10(E/MeV) defining the edges of
            the energy bins.  If this argument is None then the
            analysis energy bins will be used.  The energies in this
            sequence must align with the bin edges of the underlying
            analysis instance.

        bin_index : float
            Spectral index that will be used when fitting the energy
            distribution within an energy bin.

        use_local_index : bool
            Use a power-law approximation to the shape of the global
            spectrum in each bin.  If this is false then a constant
            index set to `bin_index` will be used.

        fix_background : bool
            Fix background components when fitting the flux
            normalization in each energy bin.  If fix_background=False
            then all background parameters that are currently free in
            the fit will be profiled.  By default fix_background=True.

        ul_confidence : float
            Set the confidence level that will be used for the
            calculation of flux upper limits in each energy bin.

        cov_scale : float
            Scaling factor that will be applied when setting the
            gaussian prior on the normalization of free background
            sources.  If this parameter is None then no gaussian prior
            will be applied.

        make_plots : bool
            Generate plots.

        write_fits : bool
            Write the output to a FITS file.

        write_npy : bool
            Write the output dictionary to a numpy file.

        optimizer : dict
            Dictionary that overrides the default optimizer settings.

        Returns
        -------
        sed : dict
            Dictionary containing output of the SED analysis.

        """

        name = self.roi.get_source_by_name(name).name

        # Create schema for method configuration
        schema = ConfigSchema(self.defaults['sed'],
                              optimizer=self.defaults['optimizer'])
        schema.add_option('prefix', '')
        schema.add_option('make_plots', False)
        schema.add_option('write_fits', True)
        schema.add_option('write_npy', True)
        schema.add_option('loge_bins', None, '', list)
        config = utils.create_dict(self.config['sed'],
                                   optimizer=self.config['optimizer'])
        config = schema.create_config(config, **kwargs)

        self.logger.info('Computing SED for %s' % name)

        o = self._make_sed(name, **config)
        filename = \
            utils.format_filename(self.workdir, 'sed',
                                  prefix=[config['prefix'],
                                          name.lower().replace(' ', '_')])

        o['file'] = None
        if config['write_fits']:
            o['file'] = os.path.basename(filename) + '.fits'
            self._make_sed_fits(o, filename + '.fits', **config)

        if config['write_npy']:
            np.save(filename + '.npy', o)

        if config['make_plots']:
            self._plotter.make_sed_plots(o, **config)

        self.logger.info('Finished SED')

        return o
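
A usage sketch (the source name and binning are illustrative; loge_bins must align with the analysis bin edges, as noted in the docstring):

    from fermipy.gtanalysis import GTAnalysis

    gta = GTAnalysis('config.yaml')      # hypothetical configuration
    gta.setup()

    sed = gta.sed('3FGL J2253.9+1609', make_plots=True, write_fits=True)
    print(sed['file'])

    # Custom binning in log10(E/MeV); edges must line up with the analysis bins.
    sed_coarse = gta.sed('3FGL J2253.9+1609', loge_bins=[2.0, 2.5, 3.0, 3.5, 4.0])
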
Example #15
    def tscube(self, prefix='', **kwargs):
        """Generate a spatial TS map for a source component with
        properties defined by the `model` argument.  This method uses
        the `gttscube` ST application for source fitting and will
        simultaneously fit the test source normalization as well as
        the normalizations of any background components that are
        currently free.  The output of this method is a dictionary
        containing `~fermipy.skymap.Map` objects with the TS and
        amplitude of the best-fit test source.  By default this method
        will also save maps to FITS files and render them as image
        files.

        Parameters
        ----------

        prefix : str
           Optional string that will be prepended to all output files
           (FITS and rendered images).

        model : dict
           Dictionary defining the properties of the test source.

        do_sed : bool
           Compute the energy bin-by-bin fits.

        nnorm : int
           Number of points in the likelihood v. normalization scan.

        norm_sigma : float
           Number of sigma to use for the scan range.

        tol : float
           Criteria for fit convergence (estimated vertical distance
           to min < tol).

        tol_type : int
           Absolute (0) or relative (1) criteria for convergence.

        max_iter : int
           Maximum number of iterations for the Newton's method fitter.

        remake_test_source : bool
           If true, recomputes the test source image (otherwise just shifts it)

        st_scan_level : int

        make_plots : bool
           Write image files.

        write_fits : bool
           Write a FITS file with the results of the analysis.

        Returns
        -------

        maps : dict
           A dictionary containing the `~fermipy.skymap.Map` objects
           for TS and source amplitude.

        """

        self.logger.info('Generating TS cube')

        schema = ConfigSchema(self.defaults['tscube'])
        schema.add_option('make_plots', True)
        schema.add_option('write_fits', True)
        schema.add_option('write_npy', True)
        config = schema.create_config(self.config['tscube'], **kwargs)

        maps = self._make_ts_cube(prefix, **config)

        if config['make_plots']:
            plotter = plotting.AnalysisPlotter(self.config['plotting'],
                                               fileio=self.config['fileio'],
                                               logging=self.config['logging'])

            plotter.make_tsmap_plots(maps, self.roi, suffix='tscube')

        self.logger.info("Finished TS cube")
        return maps
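
A usage sketch (the option values are illustrative; do_sed and nnorm are documented in the docstring above):

    from fermipy.gtanalysis import GTAnalysis

    gta = GTAnalysis('config.yaml')      # hypothetical configuration
    gta.setup()

    cube = gta.tscube('fit1',
                      model={'SpatialModel': 'PointSource', 'Index': 2.0},
                      do_sed=True, nnorm=10, make_plots=True)
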
Example #16
File: tsmap.py Project: jefemagril/fermipy
    def tscube(self,  prefix='', **kwargs):
        """Generate a spatial TS map for a source component with
        properties defined by the `model` argument.  This method uses
        the `gttscube` ST application for source fitting and will
        simultaneously fit the test source normalization as well as
        the normalizations of any background components that are
        currently free.  The output of this method is a dictionary
        containing `~fermipy.skymap.Map` objects with the TS and
        amplitude of the best-fit test source.  By default this method
        will also save maps to FITS files and render them as image
        files.

        Parameters
        ----------

        prefix : str
           Optional string that will be prepended to all output files
           (FITS and rendered images).

        model : dict
           Dictionary defining the properties of the test source.

        do_sed : bool
           Compute the energy bin-by-bin fits.

        nnorm : int
           Number of points in the likelihood v. normalization scan.

        norm_sigma : float
           Number of sigma to use for the scan range.

        tol : float
           Criteria for fit convergence (estimated vertical distance
           to min < tol).

        tol_type : int
           Absolute (0) or relative (1) criteria for convergence.

        max_iter : int
           Maximum number of iterations for the Newton's method fitter.

        remake_test_source : bool
           If true, recomputes the test source image (otherwise just shifts it)

        st_scan_level : int

        make_plots : bool
           Write image files.

        write_fits : bool
           Write a FITS file with the results of the analysis.

        Returns
        -------

        maps : dict
           A dictionary containing the `~fermipy.skymap.Map` objects
           for TS and source amplitude.

        """

        self.logger.info('Generating TS cube')

        schema = ConfigSchema(self.defaults['tscube'])
        schema.add_option('make_plots', True)
        schema.add_option('write_fits', True)
        schema.add_option('write_npy', True)
        config = schema.create_config(self.config['tscube'], **kwargs)

        maps = self._make_ts_cube(prefix, **config)

        if config['make_plots']:
            plotter = plotting.AnalysisPlotter(self.config['plotting'],
                                               fileio=self.config['fileio'],
                                               logging=self.config['logging'])

            plotter.make_tsmap_plots(maps, self.roi, suffix='tscube')

        self.logger.info("Finished TS cube")
        return maps
Example #17
    def sed(self, name, **kwargs):
        """Generate a spectral energy distribution (SED) for a source.  This
        function will fit the normalization of the source in each
        energy bin.  By default the SED will be generated with the
        analysis energy bins but a custom binning can be defined with
        the ``loge_bins`` parameter.

        Parameters
        ----------
        name : str
            Source name.

        prefix : str
            Optional string that will be prepended to all output files
            (FITS and rendered images).

        loge_bins : `~numpy.ndarray`
            Sequence of energies in log10(E/MeV) defining the edges of
            the energy bins.  If this argument is None then the
            analysis energy bins will be used.  The energies in this
            sequence must align with the bin edges of the underlying
            analysis instance.

        {options}

        optimizer : dict
            Dictionary that overrides the default optimizer settings.

        Returns
        -------
        sed : dict
            Dictionary containing output of the SED analysis.

        """
        timer = Timer.create(start=True)
        name = self.roi.get_source_by_name(name).name

        # Create schema for method configuration
        schema = ConfigSchema(self.defaults['sed'],
                              optimizer=self.defaults['optimizer'])
        schema.add_option('prefix', '')
        schema.add_option('outfile', None, '', str)
        schema.add_option('loge_bins', None, '', list)
        config = utils.create_dict(self.config['sed'],
                                   optimizer=self.config['optimizer'])
        config = schema.create_config(config, **kwargs)

        self.logger.info('Computing SED for %s' % name)

        o = self._make_sed(name, **config)

        self.logger.info('Finished SED')

        outfile = config.get('outfile', None)
        if outfile is None:
            outfile = utils.format_filename(self.workdir, 'sed',
                                            prefix=[config['prefix'],
                                                    name.lower().replace(' ', '_')])
        else:
            outfile = os.path.join(self.workdir,
                                   os.path.splitext(outfile)[0])

        o['file'] = None
        if config['write_fits']:
            o['file'] = os.path.basename(outfile) + '.fits'
            self._make_sed_fits(o, outfile + '.fits', **config)

        if config['write_npy']:
            np.save(outfile + '.npy', o)

        if config['make_plots']:
            self._plotter.make_sed_plots(o, **config)

        self.logger.info('Execution time: %.2f s', timer.elapsed_time)
        return o
Example #18
    def find_sources(self, prefix='', **kwargs):
        """An iterative source-finding algorithm that uses likelihood
        ratio (TS) maps of the region of interest to find new sources.
        After each iteration a new TS map is generated incorporating
        sources found in the previous iteration.  The method stops
        when the number of iterations exceeds ``max_iter`` or no
        sources exceeding ``sqrt_ts_threshold`` are found.

        Parameters
        ----------
        {options}

        tsmap : dict
           Keyword arguments dictionary for tsmap method.

        tscube : dict
           Keyword arguments dictionary for tscube method.

        Returns
        -------

        peaks : list
           List of peak objects.

        sources : list
           List of source objects.

        """
        timer = Timer.create(start=True)
        self.logger.info('Starting.')

        schema = ConfigSchema(self.defaults['sourcefind'],
                              tsmap=self.defaults['tsmap'],
                              tscube=self.defaults['tscube'])

        schema.add_option('search_skydir', None, '', SkyCoord)
        schema.add_option('search_minmax_radius', [None, 1.0], '', list)

        config = utils.create_dict(self.config['sourcefind'],
                                   tsmap=self.config['tsmap'],
                                   tscube=self.config['tscube'])
        config = schema.create_config(config, **kwargs)

        # Defining default properties of test source model
        config['model'].setdefault('Index', 2.0)
        config['model'].setdefault('SpectrumType', 'PowerLaw')
        config['model'].setdefault('SpatialModel', 'PointSource')
        config['model'].setdefault('Prefactor', 1E-13)

        o = {'sources': [], 'peaks': []}

        for i in range(config['max_iter']):
            srcs, peaks = self._find_sources_iterate(prefix, i, **config)

            self.logger.info('Found %i sources in iteration %i.' %
                             (len(srcs), i))

            o['sources'] += srcs
            o['peaks'] += peaks
            if len(srcs) == 0:
                break

        self.logger.info('Done.')
        self.logger.info('Execution time: %.2f s', timer.elapsed_time)

        return o
Example #19
File: sourcefind.py Project: NAH8/fermipy
    def find_sources(self, prefix='', **kwargs):
        """An iterative source-finding algorithm.

        Parameters
        ----------

        model : dict
           Dictionary defining the properties of the test source.
           This is the model that will be used for generating TS maps.

        sqrt_ts_threshold : float
           Source threshold in sqrt(TS).  Only peaks with sqrt(TS)
           exceeding this threshold will be used as seeds for new
           sources.

        min_separation : float
           Minimum separation in degrees of sources detected in each
           iteration. The source finder will look for the maximum peak
           in the TS map within a circular region of this radius.

        max_iter : int
           Maximum number of source finding iterations.  The source
           finder will continue adding sources until no additional
           peaks are found or the number of iterations exceeds this
           number.

        sources_per_iter : int
           Maximum number of sources that will be added in each
           iteration.  If the number of detected peaks in a given
           iteration is larger than this number, only the N peaks with
           the largest TS will be used as seeds for the current
           iteration.

        tsmap_fitter : str
           Set the method used internally for generating TS maps.
           Valid options:

           * tsmap
           * tscube

        tsmap : dict
           Keyword arguments dictionary for tsmap method.

        tscube : dict
           Keyword arguments dictionary for tscube method.


        Returns
        -------

        peaks : list
           List of peak objects.

        sources : list
           List of source objects.

        """

        self.logger.info('Starting.')

        schema = ConfigSchema(self.defaults['sourcefind'],
                              tsmap=self.defaults['tsmap'],
                              tscube=self.defaults['tscube'])

        schema.add_option('search_skydir', None, '', SkyCoord)
        schema.add_option('search_minmax_radius', [None, 1.0], '', list)

        config = utils.create_dict(self.config['sourcefind'],
                                   tsmap=self.config['tsmap'],
                                   tscube=self.config['tscube'])
        config = schema.create_config(config, **kwargs)

        # Defining default properties of test source model
        config['model'].setdefault('Index', 2.0)
        config['model'].setdefault('SpectrumType', 'PowerLaw')
        config['model'].setdefault('SpatialModel', 'PointSource')
        config['model'].setdefault('Prefactor', 1E-13)

        o = {'sources': [], 'peaks': []}

        for i in range(config['max_iter']):
            srcs, peaks = self._find_sources_iterate(prefix, i, **config)

            self.logger.info('Found %i sources in iteration %i.' %
                             (len(srcs), i))

            o['sources'] += srcs
            o['peaks'] += peaks
            if len(srcs) == 0:
                break

        self.logger.info('Done.')

        return o