Example #1
def main():
    """ Main function for command line usage """
    usage = "usage: %(prog)s [options] "
    description = "Merge a set of Fermi-LAT files."

    parser = argparse.ArgumentParser(usage=usage, description=description)

    parser.add_argument('-o', '--output', default=None, type=str,
                        help='Output file.')
    parser.add_argument('--ccube', default=None, type=str,
                        help='Input counts cube file.')
    parser.add_argument('--bexpcube', default=None, type=str,
                        help='Input binned exposure cube.')
    parser.add_argument('--hpx_order', default=None, type=int,
                        help='Order of output map: default = counts map order')
    parser.add_argument('--clobber', action='store_true',
                        help='Overwrite output file')
 
    args = parser.parse_args()

    ccube = HpxMap.create_from_fits(args.ccube, hdu='SKYMAP')
    bexpcube = HpxMap.create_from_fits(args.bexpcube, hdu='HPXEXPOSURES')
    
    if args.hpx_order:
        hpx_order = args.hpx_order
    else:
        hpx_order = ccube.hpx.order

    out_cube = intensity_cube(ccube, bexpcube, hpx_order)
    out_cube.hpx.write_fits(out_cube.data, args.output, clobber=args.clobber)
Example #2
    def copy_ccube(ccube, outsrcmap, hpx_order):
        """Copy a counts cube into outsrcmap file
        reducing the HEALPix order to hpx_order if needed.
        """
        sys.stdout.write("  Copying counts cube from %s to %s\n" % (ccube, outsrcmap))
        try:
            hdulist_in = fits.open(ccube)
        except IOError:
            hdulist_in = fits.open("%s.gz" % ccube)

        hpx_order_in = hdulist_in[1].header['ORDER']

        if hpx_order_in > hpx_order:
            hpxmap = HpxMap.create_from_hdulist(hdulist_in)
            hpxmap_out = hpxmap.ud_grade(hpx_order, preserve_counts=True)
            hpxlist_out = hdulist_in
            #hpxlist_out['SKYMAP'] = hpxmap_out.create_image_hdu()
            hpxlist_out[1] = hpxmap_out.create_image_hdu()
            hpxlist_out[1].name = 'SKYMAP'
            hpxlist_out.writeto(outsrcmap)
            return hpx_order
        else:
            os.system('cp %s %s' % (ccube, outsrcmap))
            #os.system('cp %s.gz %s.gz' % (ccube, outsrcmap))
            #os.system('gunzip -f %s.gz' % (outsrcmap))
        return None
Example #3
 def _compute_counts_from_model(model, bexpcube):
     """ Make the counts maps from teh mdoe
     """
     data = model.data * bexpcube.data
     ebins = model.hpx.ebins
     ratio = ebins[1:] / ebins[0:-1]
     half_log_ratio = np.log(ratio) / 2.
     int_map = ((data[0:-1].T * ebins[0:-1]) + (data[1:].T * ebins[1:])) * half_log_ratio
     return HpxMap(int_map.T, model.hpx)
Example #4
 def _make_bright_pixel_mask(intensity_mean, mask_factor=5.0):
     """ Make of mask of all the brightest pixels """
     mask = np.zeros((intensity_mean.data.shape), bool)
     nebins = len(intensity_mean.data)
     sum_intensity = intensity_mean.data.sum(0)
     mean_intensity = sum_intensity.mean()
     for i in range(nebins):
         mask[i, 0:] = sum_intensity > (mask_factor * mean_intensity)
     return HpxMap(mask, intensity_mean.hpx)
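A minimal usage sketch for the helper above. It assumes fermipy is installed and that ResidualCR (the class these static helpers are called through in the run_analysis example further down) is in scope; the nside and the random map contents are illustrative only.

import numpy as np
from fermipy.hpx_utils import HPX
from fermipy.skymap import HpxMap

# Build a small fake intensity cube: 3 energy planes on an nside=4 all-sky grid.
hpx = HPX(4, False, 'GAL')
intensity_mean = HpxMap(np.random.rand(3, hpx.npix), hpx)

# Pixels whose summed intensity exceeds 5x the mean are flagged in every energy plane.
mask = ResidualCR._make_bright_pixel_mask(intensity_mean, mask_factor=5.0)
print(mask.data.shape, mask.data.sum())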
Example #5
    def _smooth_hpx_map(hpx_map, sigma):
        """ Smooth a healpix map using a Gaussian
        """
        if hpx_map.hpx.ordering == "NESTED":
            ring_map = hpx_map.swap_scheme()
        else:
            ring_map = hpx_map
        ring_data = ring_map.data.copy()
        nebins = len(hpx_map.data)
        smoothed_data = np.zeros((hpx_map.data.shape))
        for i in range(nebins):
            smoothed_data[i] = healpy.sphtfunc.smoothing(
                ring_data[i], sigma=np.radians(sigma), verbose=False)

        # np.ndarray.clip returns a new array rather than clipping in place,
        # so assign the result back to keep the clipped values.
        smoothed_data = smoothed_data.clip(0., 1e99)
        smoothed_ring_map = HpxMap(smoothed_data, ring_map.hpx)
        if hpx_map.hpx.ordering == "NESTED":
            return smoothed_ring_map.swap_scheme()
        return smoothed_ring_map
Example #6
 def _fill_masked_intensity_resid(intensity_resid, bright_pixel_mask):
     """ Fill the pixels used to compute the effective area correction with the mean intensity
     """
     filled_intensity = np.zeros((intensity_resid.data.shape))
     nebins = len(intensity_resid.data)
     for i in range(nebins):
         masked = bright_pixel_mask.data[i]
         unmasked = np.invert(masked)
         mean_intensity = intensity_resid.data[i][unmasked].mean()
         filled_intensity[i] = np.where(masked, mean_intensity, intensity_resid.data[i])
     return HpxMap(filled_intensity, intensity_resid.hpx)
Example #7
    def _differential_to_integral(hpx_map):
        """ Convert a differential map to an integral map

        Here we are using log-log-quadrature to compute the integral quantities.
        """
        ebins = hpx_map.hpx.ebins
        ratio = ebins[1:] / ebins[0:-1]
        half_log_ratio = np.log(ratio) / 2.
        int_map = ((hpx_map.data[0:-1].T * ebins[0:-1]) +
                   (hpx_map.data[1:].T * ebins[1:])) * half_log_ratio
        return HpxMap(int_map.T, hpx_map.hpx)
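For reference, the log-log quadrature used above is the trapezoidal rule applied to E F(E) in ln E: with bin edges E_i and differential planes F_i, each integral plane is

\int_{E_i}^{E_{i+1}} F(E)\,dE \;\approx\; \frac{1}{2}\,\ln\frac{E_{i+1}}{E_i}\,\bigl(E_i F_i + E_{i+1} F_{i+1}\bigr),

which is exactly the (hpx_map.data[0:-1].T * ebins[0:-1] + hpx_map.data[1:].T * ebins[1:]) * half_log_ratio expression in the code.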
Example #8
    def create_from_gti(cls, skydir, tab_sc, tab_gti, zmax, **kwargs):
        """Create a livetime cube from spacecraft (tab_sc) and GTI (tab_gti) tables.

        A coarse all-sky livetime histogram is filled for pixels within `radius`
        of `skydir` and then interpolated onto a finer HEALPix grid.
        """
        radius = kwargs.get('radius', 180.0)
        cth_edges = kwargs.get('cth_edges', None)
        if cth_edges is None:
            cth_edges = 1.0 - np.linspace(0, 1.0, 41)**2
            cth_edges = cth_edges[::-1]

        hpx = HPX(2**4, True, 'CEL', ebins=cth_edges)

        hpx_skydir = hpx.get_sky_dirs()

        m = skydir.separation(hpx_skydir).deg < radius
        map_lt = HpxMap(np.zeros((40, hpx.npix)), hpx)
        map_lt_wt = HpxMap(np.zeros((40, hpx.npix)), hpx)

        lt, lt_wt = fill_livetime_hist(
            hpx_skydir[m], tab_sc, tab_gti, zmax, cth_edges)
        map_lt.data[:, m] = lt
        map_lt_wt.data[:, m] = lt_wt

        hpx2 = HPX(2**6, True, 'CEL', ebins=cth_edges)

        ltc = cls(np.zeros((len(cth_edges) - 1, hpx2.npix)), hpx2, cth_edges)
        ltc_skydir = ltc.hpx.get_sky_dirs()
        m = skydir.separation(ltc_skydir).deg < radius

        ltc.data[:, m] = map_lt.interpolate(ltc_skydir[m].ra.deg,
                                            ltc_skydir[m].dec.deg,
                                            interp_log=False)
        ltc.data_wt[:, m] = map_lt_wt.interpolate(ltc_skydir[m].ra.deg,
                                                  ltc_skydir[m].dec.deg,
                                                  interp_log=False)
        return ltc
Example #9
def main():
    """ Main function for command line usage """
    usage = "usage: %(prog)s [options] "
    description = "Merge a set of Fermi-LAT files."

    parser = argparse.ArgumentParser(usage=usage, description=description)

    parser.add_argument('-o',
                        '--output',
                        default=None,
                        type=str,
                        help='Output file.')
    parser.add_argument('--ccube',
                        default=None,
                        type=str,
                        help='Input counts cube file.')
    parser.add_argument('--bexpcube',
                        default=None,
                        type=str,
                        help='Input binned exposure cube.')
    parser.add_argument('--hpx_order',
                        default=None,
                        type=int,
                        help='Order of output map: default = counts map order')
    parser.add_argument('--clobber',
                        action='store_true',
                        help='Overwrite output file')

    args = parser.parse_args()

    ccube = HpxMap.create_from_fits(args.ccube, hdu='SKYMAP')
    bexpcube = HpxMap.create_from_fits(args.bexpcube, hdu='HPXEXPOSURES')

    if args.hpx_order:
        hpx_order = args.hpx_order
    else:
        hpx_order = ccube.hpx.order

    out_cube = intensity_cube(ccube, bexpcube, hpx_order)
    out_cube.hpx.write_fits(out_cube.data, args.output, clobber=args.clobber)
Example #10
    def __init__(self, data, hpx, cth_edges, **kwargs):
        HpxMap.__init__(self, data, hpx)
        self._cth_edges = cth_edges
        self._cth_center = edge_to_center(self._cth_edges)
        self._cth_width = edge_to_width(self._cth_edges)
        self._domega = (self._cth_edges[1:] -
                        self._cth_edges[:-1]) * 2 * np.pi
        self._tstart = kwargs.get('tstart', None)
        self._tstop = kwargs.get('tstop', None)
        self._zmin = kwargs.get('zmin', 0.0)
        self._zmax = kwargs.get('zmax', 180.0)
        self._tab_gti = kwargs.get('tab_gti', None)
        self._header = kwargs.get('header', None)
        self._data_wt = kwargs.get('data_wt', None)

        if self._data_wt is None:
            self._data_wt = np.zeros_like(self.data)

        if self._tab_gti is None:
            cols = [Column(name='START', dtype='f8', unit='s'),
                    Column(name='STOP', dtype='f8', unit='s')]
            self._tab_gti = Table(cols)
Example #12
def test_hpxmap(tmpdir):
    n = np.ones((10, 192), 'd')
    hpx = HPX(4, False, 'GAL')

    filename = str(tmpdir / 'test_hpx.fits')
    hpx.write_fits(n, filename, clobber=True)

    ebins = np.logspace(2, 5, 8)

    hpx_2 = HPX(1024, False, 'GAL', region='DISK(110.,75.,2.)', ebins=ebins)
    npixels = hpx_2.npix

    n2 = np.ndarray((8, npixels), 'd')
    for i in range(8):
        n2[i].flat = np.arange(npixels)

    hpx_map = HpxMap(n2, hpx_2)
    wcs, wcs_data = hpx_map.make_wcs_from_hpx(normalize=True)

    wcs_out = hpx_2.make_wcs(3)

    filename = str(tmpdir / 'test_hpx_2_wcs.fits')
    write_fits_image(wcs_data, wcs_out.wcs, filename)
Example #13
def stack_energy_planes_hpx(filelist, **kwargs):
    """
    """
    from fermipy.skymap import HpxMap
    from fermipy.hpx_utils import HPX
    maplist = [HpxMap.create_from_fits(fname, **kwargs) for fname in filelist]
    energies = np.log10(
        np.hstack([amap.hpx.evals for amap in maplist])).squeeze()

    counts = np.hstack([amap.counts.flat for amap in maplist])
    counts = counts.reshape((len(energies), int(len(counts) / len(energies))))

    template_map = maplist[0]
    hpx = HPX.create_from_header(template_map.hpx.make_header(), energies)
    return HpxMap(counts, hpx)
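A hypothetical invocation of the helper above (the file names are placeholders; the hdu keyword is simply forwarded to HpxMap.create_from_fits, as in the other examples on this page):

plane_files = ['plane_000.fits', 'plane_001.fits', 'plane_002.fits']  # placeholder paths
stacked = stack_energy_planes_hpx(plane_files, hdu='SKYMAP')
print(stacked.counts.shape)  # (total number of energy planes, npix)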
Example #14
    def append_hdus(hdulist, srcmap_file, source_names, hpx_order):
        """Append HEALPix maps to a list

        Parameters
        ----------

        hdulist : list
            The list being appended to
        srcmap_file : str
            Path to the file containing the HDUs
        source_names : list of str
            Names of the sources to extract from srcmap_file
        hpx_order : int
            Maximum order for maps
        """
        sys.stdout.write("  Extracting %i sources from %s" %
                         (len(source_names), srcmap_file))
        try:
            hdulist_in = fits.open(srcmap_file)
        except IOError:
            try:
                hdulist_in = fits.open('%s.gz' % srcmap_file)
            except IOError:
                sys.stdout.write("  Missing file %s\n" % srcmap_file)
                return

        for source_name in source_names:
            sys.stdout.write('.')
            sys.stdout.flush()
            if hpx_order is None:
                hdulist.append(hdulist_in[source_name])
            else:
                try:
                    hpxmap = HpxMap.create_from_hdulist(hdulist_in,
                                                        hdu=source_name)
                except IndexError:
                    print("  Index error on source %s in file %s" %
                          (source_name, srcmap_file))
                    continue
                except KeyError:
                    print("  Key error on source %s in file %s" %
                          (source_name, srcmap_file))
                    continue
                hpxmap_out = hpxmap.ud_grade(hpx_order, preserve_counts=True)
                hdulist.append(hpxmap_out.create_image_hdu(name=source_name))
        sys.stdout.write("\n")
        hdulist.flush()
        hdulist_in.close()
Example #15
def intensity_cube(ccube, bexpcube, hpx_order):
    """
    """
    if hpx_order == ccube.hpx.order:
        ccube_at_order = ccube
    else:
        ccube_at_order = ccube.ud_grade(hpx_order, preserve_counts=True)
    
    if hpx_order == bexpcube.hpx.order:
        bexpcube_at_order = bexpcube
    else:
        bexpcube_at_order = bexpcube.ud_grade(hpx_order, preserve_counts=True)
    
    bexpcube_data = np.sqrt(bexpcube_at_order.data[:-1, :] * bexpcube_at_order.data[1:, :])
    out_data = ccube_at_order.counts / bexpcube_data
    return HpxMap(out_data, ccube_at_order.hpx)    
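A minimal end-to-end sketch mirroring the main() function in the first example on this page (the file names are placeholders):

from fermipy.skymap import HpxMap

ccube = HpxMap.create_from_fits('ccube.fits', hdu='SKYMAP')               # placeholder path
bexpcube = HpxMap.create_from_fits('bexpcube.fits', hdu='HPXEXPOSURES')   # placeholder path
out_cube = intensity_cube(ccube, bexpcube, ccube.hpx.order)
out_cube.hpx.write_fits(out_cube.data, 'intensity.fits', clobber=True)    # placeholder output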
Example #16
def update_hpx_skymap_allsky(map_in, map_out):
    """ 'Update' a HEALPix skymap

    This checks whether map_out exists and creates it from map_in if it does not.
    If map_out does exist, the data from map_in are added to it.
    """
    if map_out is None:
        in_hpx = map_in.hpx
        out_hpx = HPX.create_hpx(in_hpx.nside, in_hpx.nest, in_hpx.coordsys,
                                 None, in_hpx.ebins, None, in_hpx.conv, None)
        data_out = map_in.expanded_counts_map()
        print(data_out.shape, data_out.sum())
        map_out = HpxMap(data_out, out_hpx)
    else:
        map_out.data += map_in.expanded_counts_map()
    return map_out
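As in merge_hpx_counts_cubes further down, the helper is meant to be called in a loop that accumulates a list of maps into one all-sky map; a minimal sketch (maps is assumed to be a list of HpxMap objects already in memory):

merged = None
for m in maps:
    merged = update_hpx_skymap_allsky(m, merged)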
Example #17
    def append_hdus(hdulist, srcmap_file, source_names, hpx_order):
        """Append HEALPix maps to a list

        Parameters
        ----------

        hdulist : list
            The list being appended to
        srcmap_file : str
            Path to the file containing the HDUs
        source_names : list of str
            Names of the sources to extract from srcmap_file
        hpx_order : int
            Maximum order for maps
        """
        sys.stdout.write("  Extracting %i sources from %s" % (len(source_names), srcmap_file))
        try:
            hdulist_in = fits.open(srcmap_file)
        except IOError:
            try:
                hdulist_in = fits.open('%s.gz' % srcmap_file)
            except IOError:
                sys.stdout.write("  Missing file %s\n" % srcmap_file)
                return

        for source_name in source_names:
            sys.stdout.write('.')
            sys.stdout.flush()
            if hpx_order is None:
                hdulist.append(hdulist_in[source_name])
            else:
                try:
                    hpxmap = HpxMap.create_from_hdulist(hdulist_in, hdu=source_name)
                except IndexError:
                    print("  Index error on source %s in file %s" % (source_name, srcmap_file))
                    continue
                except KeyError:
                    print("  Key error on source %s in file %s" % (source_name, srcmap_file))
                    continue
                hpxmap_out = hpxmap.ud_grade(hpx_order, preserve_counts=True)
                hdulist.append(hpxmap_out.create_image_hdu(name=source_name))
        sys.stdout.write("\n")
        hdulist.flush()
        hdulist_in.close()
Example #18
    def _intergral_to_differential(hpx_map, gamma=-2.0):
        """ Convert integral quantity to differential quantity

        Here we are assuming the spectrum is a powerlaw with index gamma and we
        are using log-log-quadrature to compute the integral quantities.
        """
        nebins = len(hpx_map.data)
        diff_map = np.zeros((nebins + 1, hpx_map.hpx.npix))
        ebins = hpx_map.hpx.ebins
        ratio = ebins[1:] / ebins[0:-1]
        half_log_ratio = np.log(ratio) / 2.
        ratio_gamma = np.power(ratio, gamma)
        #ratio_inv_gamma = np.power(ratio, -1. * gamma)

        diff_map[0] = hpx_map.data[0] / ((ebins[0] + ratio_gamma[0] * ebins[1]) * half_log_ratio[0])
        for i in range(nebins):
            diff_map[i + 1] = (hpx_map.data[i] / (ebins[i + 1] *
                                                  half_log_ratio[i])) - (diff_map[i] / ratio[i])
        return HpxMap(diff_map, hpx_map.hpx)
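The recursion above inverts the same log-log quadrature rule: writing r_i = E_{i+1}/E_i for the bin ratios, each integral plane satisfies F_i = \tfrac{1}{2}\ln r_i\,(E_i D_i + E_{i+1} D_{i+1}), so the differential planes are recovered as

D_{i+1} = \frac{F_i}{E_{i+1}\,\tfrac{1}{2}\ln r_i} - \frac{D_i}{r_i},

with the first plane seeded by the power-law assumption D_1 = r_0^{\gamma} D_0, which gives D_0 = F_0 / \bigl[(E_0 + r_0^{\gamma} E_1)\,\tfrac{1}{2}\ln r_0\bigr].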
Example #19
def main():

    import sys
    import argparse

    # Argument definition
    usage = "usage: %(prog)s [options]"
    description = "Collect all the new source"

    parser = argparse.ArgumentParser(usage, description=description)

    parser.add_argument("-i",
                        "--input",
                        type=argparse.FileType('r'),
                        required=True,
                        help="Input file")

    parser.add_argument("-e",
                        "--extension",
                        type=str,
                        default="SKYMAP",
                        help="FITS HDU with HEALPix map")

    parser.add_argument("--ebin",
                        type=str,
                        default=None,
                        help="Energy bin, integer or 'ALL'")

    parser.add_argument("--zscale",
                        type=str,
                        default='log',
                        help="Scaling for color scale")

    parser.add_argument("--zmin",
                        type=float,
                        default=None,
                        help="Minimum z-axis value")

    parser.add_argument("--zmax",
                        type=float,
                        default=None,
                        help="Maximum z-axis value")

    parser.add_argument("-o",
                        "--output",
                        type=argparse.FileType('w'),
                        help="Output file.  Leave blank for interactive.")

    # Parse the command line
    args = parser.parse_args(sys.argv[1:])

    # Get the model
    f = pf.open(args.input.name)
    # We need a better check
    maptype = "None"

    model_hdu = f[args.extension]

    hpxmap = HpxMap.create_from_hdulist(f, hdu=args.extension)
    outdata = []

    if args.ebin == "ALL":
        wcsproj = hpxmap.hpx.make_wcs(naxis=2,
                                      proj='MOL',
                                      energies=None,
                                      oversample=2)
        mapping = HpxToWcsMapping(hpxmap.hpx, wcsproj)

        for i, data in enumerate(hpxmap.counts):
            ip = ImagePlotter(data=data, proj=hpxmap.hpx, mapping=mapping)
            fig = plt.figure(i)
            im, ax = ip.plot(zscale=args.zscale,
                             vmin=args.zmin,
                             vmax=args.zmax)
            outdata.append(fig)

    elif args.ebin is None:
        ip = ImagePlotter(data=hpxmap.counts, proj=hpxmap.hpx)
        im, ax = ip.plot(zscale=args.zscale, vmin=args.zmin, vmax=args.zmax)
        outdata.append((im, ax))
    else:
        try:
            ibin = int(args.ebin)
            ip = ImagePlotter(data=hpxmap.counts[ibin], proj=hpxmap.hpx)
            im, ax = ip.plot(zscale=args.zscale,
                             vmin=args.zmin,
                             vmax=args.zmax)
            outdata.append((im, ax))
        except ValueError:
            raise ValueError("--ebin argument must be an integer or 'ALL'")

    if args.output is None:
        plt.show()
    else:
        if len(outdata) == 1:
            plt.savefig(args.output.name)
        else:
            base, ext = os.path.splitext(args.output.name)
            for i, fig in enumerate(outdata):
                fig.savefig("%s_%02i%s" % (base, i, ext))
Example #20
def run_flux_sensitivity(**kwargs):

    index = kwargs.get('index', 2.0)
    sedshape = kwargs.get('sedshape', 'PowerLaw')
    cutoff = kwargs.get('cutoff', 1e3)
    curvindex = kwargs.get('curvindex', 1.0)
    beta = kwargs.get('beta', 0.0)
    emin = kwargs.get('emin', 10**1.5)
    emax = kwargs.get('emax', 10**6.0)
    nbin = kwargs.get('nbin', 18)
    glon = kwargs.get('glon', 0.0)
    glat = kwargs.get('glat', 0.0)
    ltcube_filepath = kwargs.get('ltcube', None)
    galdiff_filepath = kwargs.get('galdiff', None)
    isodiff_filepath = kwargs.get('isodiff', None)
    galdiff_fit_filepath = kwargs.get('galdiff_fit', None)
    isodiff_fit_filepath = kwargs.get('isodiff_fit', None)
    wcs_npix = kwargs.get('wcs_npix', 40)
    wcs_cdelt = kwargs.get('wcs_cdelt', 0.5)
    wcs_proj = kwargs.get('wcs_proj', 'AIT')
    map_type = kwargs.get('map_type', None)
    spatial_model = kwargs.get('spatial_model', 'PointSource')
    spatial_size = kwargs.get('spatial_size', 1E-2)

    obs_time_yr = kwargs.get('obs_time_yr', None)
    event_class = kwargs.get('event_class', 'P8R2_SOURCE_V6')
    min_counts = kwargs.get('min_counts', 3.0)
    ts_thresh = kwargs.get('ts_thresh', 25.0)
    nside = kwargs.get('hpx_nside', 16)
    output = kwargs.get('output', None)

    event_types = [['FRONT', 'BACK']]

    if sedshape == 'PowerLaw':
        fn = spectrum.PowerLaw([1E-13, -index], scale=1E3)
    elif sedshape == 'PLSuperExpCutoff':
        fn = spectrum.PLSuperExpCutoff([1E-13, -index, cutoff, curvindex],
                                       scale=1E3)
    elif sedshape == 'LogParabola':
        fn = spectrum.LogParabola([1E-13, -index, beta], scale=1E3)

    log_ebins = np.linspace(np.log10(emin), np.log10(emax), nbin + 1)
    ebins = 10**log_ebins
    ectr = np.exp(utils.edge_to_center(np.log(ebins)))

    c = SkyCoord(glon, glat, unit='deg', frame='galactic')

    if ltcube_filepath is None:

        if obs_time_yr is None:
            raise Exception('No observation time defined.')

        ltc = LTCube.create_from_obs_time(obs_time_yr * 365 * 24 * 3600.)
    else:
        ltc = LTCube.create(ltcube_filepath)
        if obs_time_yr is not None:
            ltc._counts *= obs_time_yr * 365 * \
                24 * 3600. / (ltc.tstop - ltc.tstart)

    gdiff = skymap.Map.create_from_fits(galdiff_filepath)
    gdiff_fit = None
    if galdiff_fit_filepath is not None:
        gdiff_fit = skymap.Map.create_from_fits(galdiff_fit_filepath)

    if isodiff_filepath is None:
        isodiff = utils.resolve_file_path('iso_%s_v06.txt' % event_class,
                                          search_dirs=[
                                              os.path.join(
                                                  '$FERMIPY_ROOT', 'data'),
                                              '$FERMI_DIFFUSE_DIR'
                                          ])
        isodiff = os.path.expandvars(isodiff)
    else:
        isodiff = isodiff_filepath

    iso = np.loadtxt(isodiff, unpack=True)
    iso_fit = None
    if isodiff_fit_filepath is not None:
        iso_fit = np.loadtxt(isodiff_fit_filepath, unpack=True)

    scalc = SensitivityCalc(gdiff,
                            iso,
                            ltc,
                            ebins,
                            event_class,
                            event_types,
                            gdiff_fit=gdiff_fit,
                            iso_fit=iso_fit,
                            spatial_model=spatial_model,
                            spatial_size=spatial_size)

    # Compute Maps
    map_diff_flux = None
    map_diff_npred = None
    map_int_flux = None
    map_int_npred = None

    map_nstep = 500

    if map_type == 'hpx':

        hpx = HPX(nside, True, 'GAL', ebins=ebins)
        map_diff_flux = HpxMap(np.zeros((nbin, hpx.npix)), hpx)
        map_diff_npred = HpxMap(np.zeros((nbin, hpx.npix)), hpx)
        map_skydir = map_diff_flux.hpx.get_sky_dirs()

        for i in range(0, len(map_skydir), map_nstep):
            s = slice(i, i + map_nstep)
            o = scalc.diff_flux_threshold(map_skydir[s], fn, ts_thresh,
                                          min_counts)
            map_diff_flux.data[:, s] = o['flux'].T
            map_diff_npred.data[:, s] = o['npred'].T

        hpx = HPX(nside, True, 'GAL')
        map_int_flux = HpxMap(np.zeros((hpx.npix)), hpx)
        map_int_npred = HpxMap(np.zeros((hpx.npix)), hpx)
        map_skydir = map_int_flux.hpx.get_sky_dirs()

        for i in range(0, len(map_skydir), map_nstep):
            s = slice(i, i + map_nstep)
            o = scalc.int_flux_threshold(map_skydir[s], fn, ts_thresh,
                                         min_counts)
            map_int_flux.data[s] = o['flux']
            map_int_npred.data[s] = o['npred']

    elif map_type == 'wcs':

        wcs_shape = [wcs_npix, wcs_npix]
        wcs_size = wcs_npix * wcs_npix

        map_diff_flux = Map.create(c,
                                   wcs_cdelt,
                                   wcs_shape,
                                   'GAL',
                                   wcs_proj,
                                   ebins=ebins)
        map_diff_npred = Map.create(c,
                                    wcs_cdelt,
                                    wcs_shape,
                                    'GAL',
                                    wcs_proj,
                                    ebins=ebins)
        map_skydir = map_diff_flux.get_pixel_skydirs()

        for i in range(0, len(map_skydir), map_nstep):
            idx = np.unravel_index(np.arange(i, min(i + map_nstep, wcs_size)),
                                   wcs_shape)
            s = (slice(None), idx[1], idx[0])
            o = scalc.diff_flux_threshold(map_skydir[slice(i, i + map_nstep)],
                                          fn, ts_thresh, min_counts)
            map_diff_flux.data[s] = o['flux'].T
            map_diff_npred.data[s] = o['npred'].T

        map_int_flux = Map.create(c, wcs_cdelt, wcs_shape, 'GAL', wcs_proj)
        map_int_npred = Map.create(c, wcs_cdelt, wcs_shape, 'GAL', wcs_proj)
        map_skydir = map_int_flux.get_pixel_skydirs()

        for i in range(0, len(map_skydir), map_nstep):
            idx = np.unravel_index(np.arange(i, min(i + map_nstep, wcs_size)),
                                   wcs_shape)
            s = (idx[1], idx[0])
            o = scalc.int_flux_threshold(map_skydir[slice(i, i + map_nstep)],
                                         fn, ts_thresh, min_counts)
            map_int_flux.data[s] = o['flux']
            map_int_npred.data[s] = o['npred']

    o = scalc.diff_flux_threshold(c, fn, ts_thresh, min_counts)

    cols = [
        Column(name='e_min', dtype='f8', data=scalc.ebins[:-1], unit='MeV'),
        Column(name='e_ref', dtype='f8', data=o['e_ref'], unit='MeV'),
        Column(name='e_max', dtype='f8', data=scalc.ebins[1:], unit='MeV'),
        Column(name='flux', dtype='f8', data=o['flux'], unit='ph / (cm2 s)'),
        Column(name='eflux', dtype='f8', data=o['eflux'],
               unit='MeV / (cm2 s)'),
        Column(name='dnde',
               dtype='f8',
               data=o['dnde'],
               unit='ph / (MeV cm2 s)'),
        Column(name='e2dnde',
               dtype='f8',
               data=o['e2dnde'],
               unit='MeV / (cm2 s)'),
        Column(name='npred', dtype='f8', data=o['npred'], unit='ph')
    ]

    tab_diff = Table(cols)

    cols = [
        Column(name='index', dtype='f8'),
        Column(name='e_min', dtype='f8', unit='MeV'),
        Column(name='e_ref', dtype='f8', unit='MeV'),
        Column(name='e_max', dtype='f8', unit='MeV'),
        Column(name='flux', dtype='f8', unit='ph / (cm2 s)'),
        Column(name='eflux', dtype='f8', unit='MeV / (cm2 s)'),
        Column(name='dnde', dtype='f8', unit='ph / (MeV cm2 s)'),
        Column(name='e2dnde', dtype='f8', unit='MeV / (cm2 s)'),
        Column(name='npred', dtype='f8', unit='ph'),
        Column(name='ebin_e_min', dtype='f8', unit='MeV', shape=(len(ectr), )),
        Column(name='ebin_e_ref', dtype='f8', unit='MeV', shape=(len(ectr), )),
        Column(name='ebin_e_max', dtype='f8', unit='MeV', shape=(len(ectr), )),
        Column(name='ebin_flux',
               dtype='f8',
               unit='ph / (cm2 s)',
               shape=(len(ectr), )),
        Column(name='ebin_eflux',
               dtype='f8',
               unit='MeV / (cm2 s)',
               shape=(len(ectr), )),
        Column(name='ebin_dnde',
               dtype='f8',
               unit='ph / (MeV cm2 s)',
               shape=(len(ectr), )),
        Column(name='ebin_e2dnde',
               dtype='f8',
               unit='MeV / (cm2 s)',
               shape=(len(ectr), )),
        Column(name='ebin_npred', dtype='f8', unit='ph', shape=(len(ectr), ))
    ]

    cols_ebounds = [
        Column(name='E_MIN', dtype='f8', unit='MeV', data=ebins[:-1]),
        Column(name='E_MAX', dtype='f8', unit='MeV', data=ebins[1:]),
    ]

    tab_int = Table(cols)
    tab_ebounds = Table(cols_ebounds)

    index = np.linspace(1.0, 5.0, 4 * 4 + 1)

    for g in index:
        fn = spectrum.PowerLaw([1E-13, -g], scale=10**3.5)
        o = scalc.int_flux_threshold(c, fn, ts_thresh, 3.0)
        row = [g]
        for colname in tab_int.columns:
            if colname == 'index':
                continue
            if 'ebin' in colname:
                row += [o['bins'][colname.replace('ebin_', '')]]
            else:
                row += [o[colname]]

        tab_int.add_row(row)

    hdulist = fits.HDUList()
    hdulist.append(fits.table_to_hdu(tab_diff))
    hdulist.append(fits.table_to_hdu(tab_int))
    hdulist.append(fits.table_to_hdu(tab_ebounds))

    hdulist[1].name = 'DIFF_FLUX'
    hdulist[2].name = 'INT_FLUX'
    hdulist[3].name = 'EBOUNDS'

    if map_type is not None:
        hdu = map_diff_flux.create_image_hdu()
        hdu.name = 'MAP_DIFF_FLUX'
        hdulist.append(hdu)
        hdu = map_diff_npred.create_image_hdu()
        hdu.name = 'MAP_DIFF_NPRED'
        hdulist.append(hdu)

        hdu = map_int_flux.create_image_hdu()
        hdu.name = 'MAP_INT_FLUX'
        hdulist.append(hdu)
        hdu = map_int_npred.create_image_hdu()
        hdu.name = 'MAP_INT_NPRED'
        hdulist.append(hdu)

    # astropy's writeto uses 'overwrite'; the old 'clobber' keyword has been removed
    hdulist.writeto(output, overwrite=True)
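A hypothetical invocation of run_flux_sensitivity (the file paths are placeholders; the other values shown mirror the defaults read via kwargs.get above, and the isotropic template is assumed to be resolvable through $FERMIPY_ROOT or $FERMI_DIFFUSE_DIR as in the code):

run_flux_sensitivity(galdiff='galactic_diffuse_model.fits',   # placeholder path
                     obs_time_yr=10.0,                        # used because no ltcube is given
                     event_class='P8R2_SOURCE_V6',
                     map_type='hpx',
                     hpx_nside=16,
                     ts_thresh=25.0,
                     output='flux_sensitivity.fits')          # placeholder output path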
Example #21
File: HEALview.py  Project: NAH8/fermipy
def main():

    import sys
    import argparse

    # Argument definition
    usage = "usage: %(prog)s [options]"
    description = "Collect all the new source"

    parser = argparse.ArgumentParser(usage, description=description)

    parser.add_argument("-i", "--input",type=argparse.FileType('r'),required=True,
                        help="Input file")

    parser.add_argument("-e", "--extension",type=str,default="SKYMAP",
                        help="FITS HDU with HEALPix map")
 
    parser.add_argument("--ebin",type=str,default=None,
                        help="Energy bin, integer or 'ALL'")
    
    parser.add_argument("-o", "--output",type=argparse.FileType('w'),
                        help="Output file.  Leave blank for interactive.")
    
    # Parse the command line
    args = parser.parse_args(sys.argv[1:])

    # Get the model 
    f = pf.open(args.input.name)
    # We need a better check
    maptype = "None"

    model_hdu = f[args.extension]
        
    hpxmap = HpxMap.create_from_hdulist(f, extname=args.extension, ebounds="EBOUNDS")
    outdata = []
     
    if args.ebin == "ALL":
        wcsproj = hpxmap.hpx.make_wcs(naxis=2,proj='AIT',energies=None,oversample=2)
        mapping = HpxToWcsMapping(hpxmap.hpx,wcsproj)
        
        for i,data in enumerate(hpxmap.counts):
            ip =  ImagePlotter(data=data,proj=hpxmap.hpx,mapping=mapping)  
            fig = plt.figure(i)
            im,ax = ip.plot(zscale='log')
            outdata.append(fig)

    elif args.ebin is None:
        ip =  ImagePlotter(data=hpxmap.counts,proj=hpxmap.hpx)  
        im,ax = ip.plot(zscale='log')
        outdata.append((im,ax))        
    else:
        try:
            ibin = int(args.ebin)
            ip =  ImagePlotter(data=hpxmap.counts[ibin],proj=hpxmap.hpx)  
            im,ax = ip.plot(zscale='log')
            outdata.append((im,ax))        
        except:
            print("--ebin argument must be an integer or 'ALL'")

    if args.output is None:
        plt.show()
    else:
        plt.savefig(args.output.name)
Example #22
def merge_hpx_counts_cubes(filelist):
    """ Merge all the files in filelist, assuming that they HEALPix counts cubes
    """
    out_prim = None
    out_skymap = None
    out_ebounds = None

    datalist_gti = []
    exposure_sum = 0.
    nfiles = len(filelist)
    ngti = np.zeros(nfiles, int)

    out_name = None
    # Initialize so the post-loop checks are safe even for a single input file
    date_end = None
    tstop = None

    for i, filename in enumerate(filelist):
        fin = fits.open(filename)
        sys.stdout.write('.')
        sys.stdout.flush()
        if i == 0:
            out_prim = update_null_primary(fin[0], out_prim)
            out_name = fin[1].name

        map_in = HpxMap.create_from_hdulist(fin)
        out_skymap = update_hpx_skymap_allsky(map_in, out_skymap)
        if i == 0:
            try:
                out_ebounds = update_ebounds(fin["EBOUNDS"], out_ebounds)
            except KeyError:
                out_ebounds = update_energies(fin["ENERGIES"], out_ebounds)
        try:
            (gti_data, exposure, tstop) = extract_gti_data(fin["GTI"])
            datalist_gti.append(gti_data)
            exposure_sum += exposure
            ngti[i] = len(gti_data)
        except KeyError:
            pass

        if i == 0:
            first = fin
        elif i == nfiles - 1:
            try:
                date_end = fin[0].header['DATE-END']
            except KeyError:
                date_end = None
        else:
            fin.close()

    out_skymap_hdu = out_skymap.create_image_hdu("SKYMAP")

    hdulist = [out_prim, out_skymap_hdu, out_ebounds]

    if len(datalist_gti) > 0:
        out_gti = merge_all_gti_data(datalist_gti, ngti, first['GTI'])
        out_gti.header['EXPOSURE'] = exposure_sum
        out_gti.header['TSTOP'] = tstop
        hdulist.append(out_gti)

    for hdu in hdulist:
        if date_end:
            hdu.header['DATE-END'] = date_end

    out_prim.update_header()
    sys.stdout.write("!\n")

    return fits.HDUList(hdulist)
Example #23
    def run_analysis(self, argv):
        """Run this analysis"""
        args = self._parser.parse_args(argv)

        # Read the input maps
        ccube_dirty = HpxMap.create_from_fits(args.ccube_dirty, hdu='SKYMAP')
        bexpcube_dirty = HpxMap.create_from_fits(args.bexpcube_dirty, hdu='HPXEXPOSURES')
        ccube_clean = HpxMap.create_from_fits(args.ccube_clean, hdu='SKYMAP')
        bexpcube_clean = HpxMap.create_from_fits(args.bexpcube_clean, hdu='HPXEXPOSURES')

        # Decide what HEALPix order to work at
        if args.hpx_order:
            hpx_order = args.hpx_order
        else:
            hpx_order = ccube_dirty.hpx.order

        # Cast all the input maps to match ccube_clean
        cube_dict = ResidualCR._match_cubes(ccube_clean, ccube_dirty,
                                            bexpcube_clean, bexpcube_dirty, hpx_order)

        # Intensity maps
        intensity_clean = ResidualCR._compute_intensity(cube_dict['ccube_clean'],
                                                        cube_dict['bexpcube_clean'])
        intensity_dirty = ResidualCR._compute_intensity(cube_dict['ccube_dirty'],
                                                        cube_dict['bexpcube_dirty'])
        # Mean & ratio of intensity maps
        intensity_mean = ResidualCR._compute_mean(intensity_dirty,
                                                  intensity_clean)
        intensity_ratio = ResidualCR._compute_ratio(intensity_dirty,
                                                    intensity_clean)
        # Select the bright pixels for the Aeff correction and to mask when filling the output map
        bright_pixel_select = ResidualCR._make_bright_pixel_mask(intensity_mean,
                                                                 args.select_factor)
        bright_pixel_mask = ResidualCR._make_bright_pixel_mask(intensity_mean,
                                                               args.mask_factor)
        # Compute the Aeff corrections using the brightest pixels
        aeff_corrections = ResidualCR._get_aeff_corrections(intensity_ratio,
                                                            bright_pixel_select)
        # Apply the Aeff corrections and get the intensity residual
        corrected_dirty = ResidualCR._apply_aeff_corrections(intensity_dirty,
                                                             aeff_corrections)
        corrected_ratio = ResidualCR._compute_ratio(corrected_dirty,
                                                    intensity_clean)
        intensity_resid = ResidualCR._compute_diff(corrected_dirty,
                                                   intensity_clean)
        # Replace the masked pixels with the map mean to avoid features associated with sources
        filled_resid = ResidualCR._fill_masked_intensity_resid(intensity_resid,
                                                               bright_pixel_mask)
        # Smooth the map
        smooth_resid = ResidualCR._smooth_hpx_map(filled_resid,
                                                  args.sigma)
        # Convert to a differential map
        out_model = ResidualCR._intergral_to_differential(smooth_resid)

        # Make the ENERGIES HDU
        out_energies = ccube_dirty.hpx.make_energies_hdu()

        # Write the maps
        cubes = dict(SKYMAP=out_model)
        fits_utils.write_maps(None, cubes,
                              args.outfile, energy_hdu=out_energies)

        if args.full_output:
            # Some diagnostics
            check = ResidualCR._differential_to_integral(out_model)
            check_resid = ResidualCR._compute_diff(smooth_resid, check)
            counts_resid =\
                ResidualCR._compute_counts_from_intensity(intensity_resid,
                                                          cube_dict['bexpcube_dirty'])
            pred_counts\
                = ResidualCR._compute_counts_from_model(out_model,
                                                        cube_dict['bexpcube_dirty'])
            pred_resid = ResidualCR._compute_diff(pred_counts, counts_resid)

            out_ebounds = ccube_dirty.hpx.make_energy_bounds_hdu()
            cubes = dict(INTENSITY_CLEAN=intensity_clean,
                         INTENSITY_DIRTY=intensity_dirty,
                         INTENSITY_RATIO=intensity_ratio,
                         CORRECTED_DIRTY=corrected_dirty,
                         CORRECTED_RATIO=corrected_ratio,
                         INTENSITY_RESID=intensity_resid,
                         PIXEL_SELECT=bright_pixel_select,
                         PIXEL_MASK=bright_pixel_mask,
                         FILLED_RESID=filled_resid,
                         SMOOTH_RESID=smooth_resid,
                         CHECK=check,
                         CHECK_RESID=check_resid,
                         COUNTS_RESID=counts_resid,
                         PRED_COUNTS=pred_counts,
                         PRED_RESID=pred_resid)

            fits_utils.write_maps(None, cubes,
                                  args.outfile.replace('.fits', '_full.fits'),
                                  energy_hdu=out_ebounds)
Example #25
 def _compute_ratio(top, bot):
     """ Make a map that is the ratio of two maps
     """
     data = np.where(bot.data > 0, top.data / bot.data, 0.)
     return HpxMap(data, top.hpx)
Example #26
def run_flux_sensitivity(**kwargs):

    index = kwargs.get('index', 2.0)
    sedshape = kwargs.get('sedshape', 'PowerLaw')
    cutoff = kwargs.get('cutoff', 1e3)
    curvindex = kwargs.get('curvindex', 1.0)
    beta = kwargs.get('beta', 0.0)
    dmmass = kwargs.get('DMmass', 100.0)
    dmchannel = kwargs.get('DMchannel', 'bb')
    emin = kwargs.get('emin', 10**1.5)
    emax = kwargs.get('emax', 10**6.0)
    nbin = kwargs.get('nbin', 18)
    glon = kwargs.get('glon', 0.0)
    glat = kwargs.get('glat', 0.0)
    ltcube_filepath = kwargs.get('ltcube', None)
    galdiff_filepath = kwargs.get('galdiff', None)
    isodiff_filepath = kwargs.get('isodiff', None)
    galdiff_fit_filepath = kwargs.get('galdiff_fit', None)
    isodiff_fit_filepath = kwargs.get('isodiff_fit', None)
    wcs_npix = kwargs.get('wcs_npix', 40)
    wcs_cdelt = kwargs.get('wcs_cdelt', 0.5)
    wcs_proj = kwargs.get('wcs_proj', 'AIT')
    map_type = kwargs.get('map_type', None)
    spatial_model = kwargs.get('spatial_model', 'PointSource')
    spatial_size = kwargs.get('spatial_size', 1E-2)

    obs_time_yr = kwargs.get('obs_time_yr', None)
    event_class = kwargs.get('event_class', 'P8R2_SOURCE_V6')
    min_counts = kwargs.get('min_counts', 3.0)
    ts_thresh = kwargs.get('ts_thresh', 25.0)
    nside = kwargs.get('hpx_nside', 16)
    output = kwargs.get('output', None)

    event_types = [['FRONT', 'BACK']]

    if sedshape == 'PowerLaw':
        fn = spectrum.PowerLaw([1E-13, -index], scale=1E3)
    elif sedshape == 'PLSuperExpCutoff':
        fn = spectrum.PLSuperExpCutoff(
            [1E-13, -index, cutoff, curvindex], scale=1E3)
    elif sedshape == 'LogParabola':
        fn = spectrum.LogParabola([1E-13, -index, beta], scale=1E3)
    elif sedshape == 'DM':
        fn = spectrum.DMFitFunction([1E-26, dmmass], chan=dmchannel)

    log_ebins = np.linspace(np.log10(emin),
                            np.log10(emax), nbin + 1)
    ebins = 10**log_ebins
    ectr = np.exp(utils.edge_to_center(np.log(ebins)))

    c = SkyCoord(glon, glat, unit='deg', frame='galactic')

    if ltcube_filepath is None:

        if obs_time_yr is None:
            raise Exception('No observation time defined.')

        ltc = LTCube.create_from_obs_time(obs_time_yr * 365 * 24 * 3600.)
    else:
        ltc = LTCube.create(ltcube_filepath)
        if obs_time_yr is not None:
            ltc._counts *= obs_time_yr * 365 * \
                24 * 3600. / (ltc.tstop - ltc.tstart)

    gdiff = skymap.Map.create_from_fits(galdiff_filepath)
    gdiff_fit = None
    if galdiff_fit_filepath is not None:
        gdiff_fit = skymap.Map.create_from_fits(galdiff_fit_filepath)

    if isodiff_filepath is None:
        isodiff = utils.resolve_file_path('iso_%s_v06.txt' % event_class,
                                          search_dirs=[os.path.join('$FERMIPY_ROOT', 'data'),
                                                       '$FERMI_DIFFUSE_DIR'])
        isodiff = os.path.expandvars(isodiff)
    else:
        isodiff = isodiff_filepath

    iso = np.loadtxt(isodiff, unpack=True)
    iso_fit = None
    if isodiff_fit_filepath is not None:
        iso_fit = np.loadtxt(isodiff_fit_filepath, unpack=True)

    scalc = SensitivityCalc(gdiff, iso, ltc, ebins,
                            event_class, event_types, gdiff_fit=gdiff_fit,
                            iso_fit=iso_fit, spatial_model=spatial_model,
                            spatial_size=spatial_size)

    # Compute Maps
    map_diff_flux = None
    map_diff_npred = None
    map_int_flux = None
    map_int_npred = None

    map_nstep = 500

    if map_type == 'hpx':

        hpx = HPX(nside, True, 'GAL', ebins=ebins)
        map_diff_flux = HpxMap(np.zeros((nbin, hpx.npix)), hpx)
        map_diff_npred = HpxMap(np.zeros((nbin, hpx.npix)), hpx)
        map_skydir = map_diff_flux.hpx.get_sky_dirs()

        for i in range(0, len(map_skydir), map_nstep):
            s = slice(i, i + map_nstep)
            o = scalc.diff_flux_threshold(
                map_skydir[s], fn, ts_thresh, min_counts)
            map_diff_flux.data[:, s] = o['flux'].T
            map_diff_npred.data[:, s] = o['npred'].T

        hpx = HPX(nside, True, 'GAL')
        map_int_flux = HpxMap(np.zeros((hpx.npix)), hpx)
        map_int_npred = HpxMap(np.zeros((hpx.npix)), hpx)
        map_skydir = map_int_flux.hpx.get_sky_dirs()

        for i in range(0, len(map_skydir), map_nstep):
            s = slice(i, i + map_nstep)
            o = scalc.int_flux_threshold(
                map_skydir[s], fn, ts_thresh, min_counts)
            map_int_flux.data[s] = o['flux']
            map_int_npred.data[s] = o['npred']

    elif map_type == 'wcs':

        wcs_shape = [wcs_npix, wcs_npix]
        wcs_size = wcs_npix * wcs_npix

        map_diff_flux = Map.create(
            c, wcs_cdelt, wcs_shape, 'GAL', wcs_proj, ebins=ebins)
        map_diff_npred = Map.create(
            c, wcs_cdelt, wcs_shape, 'GAL', wcs_proj, ebins=ebins)
        map_skydir = map_diff_flux.get_pixel_skydirs()

        for i in range(0, len(map_skydir), map_nstep):
            idx = np.unravel_index(
                np.arange(i, min(i + map_nstep, wcs_size)), wcs_shape)
            s = (slice(None), idx[1], idx[0])
            o = scalc.diff_flux_threshold(
                map_skydir[slice(i, i + map_nstep)], fn, ts_thresh, min_counts)
            map_diff_flux.data[s] = o['flux'].T
            map_diff_npred.data[s] = o['npred'].T

        map_int_flux = Map.create(c, wcs_cdelt, wcs_shape, 'GAL', wcs_proj)
        map_int_npred = Map.create(c, wcs_cdelt, wcs_shape, 'GAL', wcs_proj)
        map_skydir = map_int_flux.get_pixel_skydirs()

        for i in range(0, len(map_skydir), map_nstep):
            idx = np.unravel_index(
                np.arange(i, min(i + map_nstep, wcs_size)), wcs_shape)
            s = (idx[1], idx[0])
            o = scalc.int_flux_threshold(
                map_skydir[slice(i, i + map_nstep)], fn, ts_thresh, min_counts)
            map_int_flux.data[s] = o['flux']
            map_int_npred.data[s] = o['npred']

    o = scalc.diff_flux_threshold(c, fn, ts_thresh, min_counts)

    cols = [Column(name='e_min', dtype='f8', data=scalc.ebins[:-1], unit='MeV'),
            Column(name='e_ref', dtype='f8', data=o['e_ref'], unit='MeV'),
            Column(name='e_max', dtype='f8', data=scalc.ebins[1:], unit='MeV'),
            Column(name='flux', dtype='f8', data=o['flux'],
                   unit='ph / (cm2 s)'),
            Column(name='eflux', dtype='f8', data=o['eflux'],
                   unit='MeV / (cm2 s)'),
            Column(name='dnde', dtype='f8', data=o['dnde'],
                   unit='ph / (MeV cm2 s)'),
            Column(name='e2dnde', dtype='f8',
                   data=o['e2dnde'], unit='MeV / (cm2 s)'),
            Column(name='npred', dtype='f8', data=o['npred'], unit='ph')]

    tab_diff = Table(cols)

    cols = [Column(name='index', dtype='f8'),
            Column(name='e_min', dtype='f8', unit='MeV'),
            Column(name='e_ref', dtype='f8', unit='MeV'),
            Column(name='e_max', dtype='f8', unit='MeV'),
            Column(name='flux', dtype='f8', unit='ph / (cm2 s)'),
            Column(name='eflux', dtype='f8', unit='MeV / (cm2 s)'),
            Column(name='dnde', dtype='f8', unit='ph / (MeV cm2 s)'),
            Column(name='e2dnde', dtype='f8', unit='MeV / (cm2 s)'),
            Column(name='npred', dtype='f8', unit='ph'),
            Column(name='ebin_e_min', dtype='f8',
                   unit='MeV', shape=(len(ectr),)),
            Column(name='ebin_e_ref', dtype='f8',
                   unit='MeV', shape=(len(ectr),)),
            Column(name='ebin_e_max', dtype='f8',
                   unit='MeV', shape=(len(ectr),)),
            Column(name='ebin_flux', dtype='f8',
                   unit='ph / (cm2 s)', shape=(len(ectr),)),
            Column(name='ebin_eflux', dtype='f8',
                   unit='MeV / (cm2 s)', shape=(len(ectr),)),
            Column(name='ebin_dnde', dtype='f8',
                   unit='ph / (MeV cm2 s)', shape=(len(ectr),)),
            Column(name='ebin_e2dnde', dtype='f8',
                   unit='MeV / (cm2 s)', shape=(len(ectr),)),
            Column(name='ebin_npred', dtype='f8', unit='ph', shape=(len(ectr),))]

    cols_ebounds = [Column(name='E_MIN', dtype='f8',
                           unit='MeV', data=ebins[:-1]),
                    Column(name='E_MAX', dtype='f8',
                           unit='MeV', data=ebins[1:]), ]

    tab_int = Table(cols)
    tab_ebounds = Table(cols_ebounds)

    index = np.linspace(1.0, 5.0, 4 * 4 + 1)

    for g in index:
        fn = spectrum.PowerLaw([1E-13, -g], scale=10**3.5)
        o = scalc.int_flux_threshold(c, fn, ts_thresh, 3.0)
        row = [g]
        for colname in tab_int.columns:
            if colname == 'index':
                continue
            if 'ebin' in colname:
                row += [o['bins'][colname.replace('ebin_', '')]]
            else:
                row += [o[colname]]

        tab_int.add_row(row)

    hdulist = fits.HDUList()
    hdulist.append(fits.table_to_hdu(tab_diff))
    hdulist.append(fits.table_to_hdu(tab_int))
    hdulist.append(fits.table_to_hdu(tab_ebounds))

    hdulist[1].name = 'DIFF_FLUX'
    hdulist[2].name = 'INT_FLUX'
    hdulist[3].name = 'EBOUNDS'

    if map_type is not None:
        hdu = map_diff_flux.create_image_hdu()
        hdu.name = 'MAP_DIFF_FLUX'
        hdulist.append(hdu)
        hdu = map_diff_npred.create_image_hdu()
        hdu.name = 'MAP_DIFF_NPRED'
        hdulist.append(hdu)

        hdu = map_int_flux.create_image_hdu()
        hdu.name = 'MAP_INT_FLUX'
        hdulist.append(hdu)
        hdu = map_int_npred.create_image_hdu()
        hdu.name = 'MAP_INT_NPRED'
        hdulist.append(hdu)

    hdulist.writeto(output, overwrite=True)
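This variant adds the 'DM' spectral shape; a hypothetical invocation exercising that branch (the file paths are placeholders, and DMmass / DMchannel are the keyword names read via kwargs.get above):

run_flux_sensitivity(galdiff='galactic_diffuse_model.fits',   # placeholder path
                     obs_time_yr=10.0,
                     sedshape='DM', DMmass=100.0, DMchannel='bb',
                     output='dm_flux_sensitivity.fits')       # placeholder output path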
Example #27
 def _apply_aeff_corrections(intensity_map, aeff_corrections):
     """ Multipy a map by the effective area correction
     """
     data = aeff_corrections * intensity_map.data.T
     return HpxMap(data.T, intensity_map.hpx)
Example #28
 def _compute_diff(map1, map2):
     """ Make a map that is the difference of two maps
     """
     data = map1.data - map2.data
     return HpxMap(data, map1.hpx)
Example #29
 def __init__(self, data, hpx):
     HpxMap.__init__(self, data, hpx)
Example #30
 def _compute_product(map1, map2):
     """ Make a map that is the product of two maps
     """
     data = map1.data * map2.data
     return HpxMap(data, map1.hpx)
Example #31
 def _compute_mean(map1, map2):
     """ Make a map that is the mean of two maps
     """
     data = (map1.data + map2.data) / 2.
     return HpxMap(data, map1.hpx)
Example #33
 def _compute_counts_from_intensity(intensity, bexpcube):
     """ Make the counts map from the intensity
     """
     data = intensity.data * np.sqrt(bexpcube.data[1:] * bexpcube.data[0:-1])
     return HpxMap(data, intensity.hpx)