Example 1
def identify_objects(image_data, nsigma, min_area, deb_n_thresh, deb_cont,
                     param_dict):
    '''
    This function performs source identification and generates a segmentation map,
    which is then used for masking the sources.
    :param image_data: the image data, an m x n numpy ndarray, e.g. fits.getdata('image_file_name')
    :param nsigma: source detection significance threshold.
    :param min_area: minimum number of pixels for a detection to be considered a source.
    :param deb_n_thresh: number of thresholds for the deblending routine, e.g. 32, 64, etc.
    :param deb_cont: minimum contrast ratio for deblending (see the Source Extractor or SEP documentation).
    :param param_dict: a dictionary containing
    'sep_filter_kwarg' = the filter keyword, which can be 'tophat', 'gauss', or 'boxcar', and
    'sep_filter_size' = the 'size' of the filter: the FWHM for a Gaussian, the radius for a tophat,
    and the side length of the 2D box for a boxcar.
    :return: objects: a numpy array of the detected objects, ordered by their segmentation values in the segmap.
    segmap: a segmentation map in which each source is marked with a unique identification number.
    '''

    # Note, this whole routine uses a Python-based source identification module named SEP (Barbary et al., 2016)

    # Unpack the filter keyword and its size from the parameter dictionary.
    filter_kwarg = param_dict['sep_filter_kwarg']
    filter_size = float(param_dict['sep_filter_size'])

    # Byte-swap the data to native byte order, as recommended in the SEP documentation.
    byte_swaped_data = image_data.byteswap().newbyteorder()

    # SEP estimates a global background.
    global_bkg = sep.Background(byte_swaped_data)

    # background subtracted data = original data - estimated global background.
    bkg_subtracted = byte_swaped_data - global_bkg

    # In the following block, we check for the user's choice of filter and its size.
    # We define a kernel based on their choice.
    if filter_kwarg.lower() not in ['tophat', 'gauss', 'boxcar']:
        warnings.warn(
            "The filter '%s' is not supported yet, defaulting to a tophat of radius 5" % filter_kwarg
        )
        source_kernel = Tophat2DKernel(5)
    elif filter_kwarg.lower() == 'tophat':
        source_kernel = Tophat2DKernel(filter_size)
    elif filter_kwarg.lower() == 'gauss':
        _gauss_sigma = filter_size * gaussian_fwhm_to_sigma  # FWHM -> sigma; gaussian_fwhm_to_sigma is a conversion factor (as in astropy.stats), not a function
        source_kernel = Gaussian2DKernel(_gauss_sigma)
    elif filter_kwarg.lower() == 'boxcar':
        source_kernel = Box2DKernel(filter_size)

    # Object detection and segmentation map generation.
    objects, segmap = sep.extract(bkg_subtracted,
                                  nsigma,
                                  err=global_bkg.globalrms,
                                  minarea=min_area,
                                  deblend_nthresh=deb_n_thresh,
                                  deblend_cont=deb_cont,
                                  segmentation_map=True,
                                  filter_kernel=source_kernel.array)

    return objects, segmap
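A minimal usage sketch for the function above. It assumes SEP, numpy, and the astropy kernels are installed and that identify_objects is importable; the synthetic image and parameter values are made up for illustration only.

import numpy as np

# Synthetic 200x200 image: Gaussian noise plus one bright square "source".
# Use big-endian float32, as typically returned by fits.getdata, so the
# byte swap inside identify_objects yields native-order data for SEP.
rng = np.random.default_rng(42)
image = rng.normal(0.0, 1.0, size=(200, 200)).astype('>f4')
image[95:105, 95:105] += 50.0

params = {'sep_filter_kwarg': 'tophat', 'sep_filter_size': 5}
objects, segmap = identify_objects(image, nsigma=3.0, min_area=5,
                                   deb_n_thresh=32, deb_cont=0.005,
                                   param_dict=params)
print(len(objects), segmap.max())  # number of detected sources and highest segmentation label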
Example 2
def test_compute_lima_image():
    """
    Test Li & Ma image against TS image for Tophat kernel
    """
    filename = '$GAMMAPY_EXTRA/test_datasets/unbundled/poisson_stats_image/input_all.fits.gz'
    counts = SkyImage.read(filename, hdu='counts')
    background = SkyImage.read(filename, hdu='background')
    exposure = SkyImage.read(filename, hdu='exposure')

    kernel = Tophat2DKernel(5)
    result_lima = compute_lima_image(
        counts,
        background,
        kernel,
        exposure,
    )

    kernel.normalize('integral')
    ts_estimator = TSImageEstimator()
    images = SkyImageList([counts, background, exposure])
    result_ts = ts_estimator.run(images, kernel)

    assert_allclose(result_ts['sqrt_ts'],
                    result_lima['significance'],
                    atol=1e-3)
    assert_allclose(result_ts['flux'], result_lima['flux'], atol=3e-12)
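The test above depends on the kernel normalization convention; here is a small astropy-only illustration of the difference between the default 'integral' normalization and the 'peak' normalization used in several snippets below.

from astropy.convolution import Tophat2DKernel

tophat = Tophat2DKernel(5)
print(tophat.array.sum())   # ~1.0: by default the kernel integrates to one
tophat.normalize('peak')
print(tophat.array.max())   # 1.0: after 'peak' normalization the maximum value is one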
Example 3
def test_compute_lima_on_off_image():
    """
    Test Li & Ma image with snippet from the H.E.S.S. survey data.
    """
    filename = "$GAMMAPY_DATA/tests/unbundled/hess/survey/hess_survey_snippet.fits.gz"
    n_on = Map.read(filename, hdu="ON")
    n_off = Map.read(filename, hdu="OFF")
    a_on = Map.read(filename, hdu="ONEXPOSURE")
    a_off = Map.read(filename, hdu="OFFEXPOSURE")
    significance = Map.read(filename, hdu="SIGNIFICANCE")

    kernel = Tophat2DKernel(5)
    results = LiMaMapEstimator.compute_lima_on_off_image(
        n_on, n_off, a_on, a_off, kernel)

    # Reproduce safe significance threshold from HESS software
    results["significance"].data[results["n_on"].data < 5] = 0

    # Crop the image at the boundaries: the reference image is cut out
    # from a larger map, so there is no way to reproduce the result with
    # regular boundary handling.
    actual = results["significance"].crop(kernel.shape).data
    desired = significance.crop(kernel.shape).data

    # Set boundary to NaN in reference image
    # The absolute tolerance is low because the method used here is slightly different from the one used in HGPS
    # n_off is convolved as well to ensure the method applies to true ON-OFF datasets
    assert_allclose(actual, desired, atol=0.2)
Example 4
    def _run_iteration(self, images):
        """Run one iteration.

        Parameters
        ----------
        images : `gammapy.image.SkyImageList`
            Input sky images
        """
        from scipy.ndimage import binary_erosion
        images.check_required(['counts', 'exclusion', 'background'])
        wcs = images['counts'].wcs.copy()
        p = self.parameters

        significance = self._estimate_significance(images['counts'],
                                                   images['background'])

        # update exclusion mask
        radius = p['mask_dilation_radius'].to('deg')
        scale = images['counts'].wcs_pixel_scale()[0]
        structure = np.array(Tophat2DKernel((radius / scale).value))

        mask = (significance.data <
                p['significance_threshold']) | np.isnan(significance.data)
        mask = binary_erosion(mask, structure, border_value=1)
        exclusion = SkyImage(name='exclusion',
                             data=mask.astype('float'),
                             wcs=wcs)

        background = self._estimate_background(images['counts'], exclusion)
        return SkyImageList(
            [images['counts'], background, exclusion, significance])
Example 5
def image_lima(infile, outfile, theta, onoff, residual, overwrite):
    """
    Compute Li&Ma significance images for a given set of input images.
    """
    log.info('Reading {0}'.format(infile))
    data = SkyImageList.read(infile)
    if residual:
        data.background += data.model

    for t in theta:
        # Convert theta to pix
        theta_pix = t / data._ref_header['CDELT2']
        kernel = Tophat2DKernel(theta_pix)
        with np.errstate(invalid='ignore', divide='ignore'):
            if not onoff:
                result = compute_lima_image(data.counts, data.background,
                                            kernel, data.exposure)
            else:
                result = compute_lima_on_off_image(data.n_on, data.n_off,
                                                   data.a_on, data.a_off,
                                                   kernel)
        log.info('Computing derived images')
        if len(theta) > 1:
            outfile_ = outfile.replace('.fits', '_{0:.3f}.fits'.format(t))
        else:
            outfile_ = outfile

        log.info('Writing {0}'.format(outfile_))
        result.write(outfile_, header=data._ref_header, clobber=overwrite)
Example 6
def test_compute_lima_on_off_image():
    """
    Test Li & Ma image with snippet from the H.E.S.S. survey data.
    """
    filename = "$GAMMAPY_DATA/tests/unbundled/hess/survey/hess_survey_snippet.fits.gz"
    n_on = Map.read(filename, hdu="ON")
    n_off = Map.read(filename, hdu="OFF")
    a_on = Map.read(filename, hdu="ONEXPOSURE")
    a_off = Map.read(filename, hdu="OFFEXPOSURE")
    significance = Map.read(filename, hdu="SIGNIFICANCE")

    kernel = Tophat2DKernel(5)
    results = compute_lima_on_off_image(n_on, n_off, a_on, a_off, kernel)

    # Reproduce safe significance threshold from HESS software
    results["significance"].data[results["n_on"].data < 5] = 0

    # Crop the image at the boundaries: the reference image is cut out
    # from a larger map, so there is no way to reproduce the result with
    # regular boundary handling.
    actual = results["significance"].crop(kernel.shape).data
    desired = significance.crop(kernel.shape).data

    # Set boundary to NaN in reference image
    assert_allclose(actual, desired, atol=1e-5)
Example 7
def background_skyimage_2fhl(counts):
    log.info('Computing background map.')
    images = GammaImages(counts.data, header=counts.wcs.to_header())

    source_kernel = Tophat2DKernel(5)
    source_kernel.normalize('peak')

    background_kernel = Ring2DKernel(20, 20)
    background_kernel.normalize('peak')

    ikbe = IKBE(
        images=images,
        source_kernel=source_kernel.array,
        background_kernel=background_kernel.array,
        significance_threshold=5,
        mask_dilation_radius=3,
    )

    mask_data, background_data = ikbe.run()

    mask = SkyMap.empty_like(counts)
    mask.data = mask_data

    background = SkyMap.empty_like(counts)
    background.data = background_data
    return mask, background
Example 8
    def estimate_excess_map(self, dataset):
        """Estimate excess and ts maps for single dataset.

        If exposure is defined, a flux map is also computed.

        Parameters
        ----------
        dataset : `MapDataset`
            Map dataset
        """

        pixel_size = np.mean(np.abs(dataset.counts.geom.wcs.wcs.cdelt))
        size = self.correlation_radius.deg / pixel_size
        kernel = Tophat2DKernel(size)

        counts_stat = convolved_map_dataset_counts_statistics(
            dataset, kernel, self.apply_mask_fit
        )

        geom = dataset.counts.geom.squash("energy")

        n_on = Map.from_geom(geom, data=counts_stat.n_on)
        bkg = Map.from_geom(geom, data=counts_stat.n_on - counts_stat.excess)
        excess = Map.from_geom(geom, data=counts_stat.excess)

        result = {"counts": n_on, "background": bkg, "excess": excess}

        tsmap = Map.from_geom(geom, data=counts_stat.ts)
        sqrt_ts = Map.from_geom(geom, data=counts_stat.sqrt_ts)
        result.update({"ts": tsmap, "sqrt_ts": sqrt_ts})

        err = Map.from_geom(geom, data=counts_stat.error * self.n_sigma)
        result.update({"err": err})

        if dataset.exposure:
            reco_exposure = estimate_exposure_reco_energy(dataset)
            reco_exposure = reco_exposure.sum_over_axes(keepdims=True)
            flux = excess / reco_exposure
            flux.quantity = flux.quantity.to("1 / (cm2 s)")
        else:
            flux = Map.from_geom(
                geom=dataset.counts.geom, data=np.nan * np.ones(dataset.data_shape)
            )
        result.update({"flux": flux})

        if "errn-errp" in self.selection_optional:
            errn = Map.from_geom(geom, data=counts_stat.compute_errn(self.n_sigma))
            errp = Map.from_geom(geom, data=counts_stat.compute_errp(self.n_sigma))
            result.update({"errn": errn, "errp": errp})

        if "ul" in self.selection_optional:
            ul = Map.from_geom(
                geom, data=counts_stat.compute_upper_limit(self.n_sigma_ul)
            )
            result.update({"ul": ul})

        return result
Example 9
def smooth_map(map, mask, smooth_kernal, smooth_scale, nan_flag):

    # Flag masked pixels so the convolution can interpolate over them.
    map[mask == 0] = nan_flag

    if smooth_kernal == 'box':
        kernel = Box2DKernel(smooth_scale)
    elif smooth_kernal == 'tophat':
        kernel = Tophat2DKernel(smooth_scale / 2)
    else:
        raise ValueError('Unknown smoothing kernel: {}'.format(smooth_kernal))

    return convolution.convolve_fft(map, kernel, normalize_kernel=True,
                                    ignore_edge_zeros=True, interpolate_nan=True)
Example 10
    def _estimate_exclusion(self, counts, significance):
        radius = self.parameters["mask_dilation_radius"].deg
        scale = counts.geom.pixel_scales.mean().deg
        mask_dilation_radius_pix = radius / scale

        structure = np.array(Tophat2DKernel(mask_dilation_radius_pix))

        mask = (significance.data < self.parameters["significance_threshold"]
                ) | np.isnan(significance.data)
        mask = scipy.ndimage.binary_erosion(mask, structure, border_value=1)

        return counts.copy(data=mask.astype("float"))
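A short sketch of the dilation trick used above: the tophat kernel array serves as a disk-shaped structuring element, and eroding the "keep" mask grows the excluded region around significant pixels. Plain numpy/scipy/astropy; the array sizes and values are chosen only for illustration.

import numpy as np
import scipy.ndimage
from astropy.convolution import Tophat2DKernel

structure = np.array(Tophat2DKernel(3))      # disk-shaped footprint, radius 3 pixels
mask = np.ones((20, 20), dtype=bool)         # True = pixel is kept (not significant)
mask[10, 10] = False                         # one significant pixel to be excluded
eroded = scipy.ndimage.binary_erosion(mask, structure, border_value=1)
print(mask.sum() - eroded.sum())             # newly excluded pixels, roughly the area of the disk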
Example 11
def test_compute_lima_image():
    """
    Test Li & Ma image against TS image for Tophat kernel
    """
    filename = "$GAMMAPY_DATA/tests/unbundled/poisson_stats_image/input_all.fits.gz"
    counts = Map.read(filename, hdu="counts")
    background = Map.read(filename, hdu="background")

    kernel = Tophat2DKernel(5)
    result_lima = compute_lima_image(counts, background, kernel)

    assert_allclose(result_lima["significance"].data[100, 100], 30.814916, atol=1e-3)
    assert_allclose(result_lima["significance"].data[1, 1], 0.164, atol=1e-3)
Example 12
def unit_tophat(radius):
    '''
    Return a tophat kernel array of unit height.

    Parameters
    ----------
    radius : float
        Radius of the tophat kernel in pixels.

    Returns
    -------
    kernel : `~numpy.ndarray`
        2D kernel array in which every non-zero pixel is set to 1.
    '''
    kernel = Tophat2DKernel(radius).array
    kernel[kernel != 0] = 1
    return kernel
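A quick check of what unit_tophat returns, assuming numpy and astropy are available:

import numpy as np

kernel = unit_tophat(3)
print(kernel.shape)       # square array, roughly (2*radius + 1) pixels on a side
print(np.unique(kernel))  # [0. 1.]: every non-zero pixel has been set to unit height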
Example 13
    def smooth(self, kernel='gauss', radius=0.1 * u.deg, **kwargs):
        """
        Smooth the image (works on and returns a copy).

        The definition of the smoothing parameter radius is equivalent to the
        one that is used in ds9 (see `ds9 smoothing <http://ds9.si.edu/doc/ref/how.html#Smoothing>`_).

        Parameters
        ----------
        kernel : {'gauss', 'disk', 'box'}
            Kernel shape
        radius : `~astropy.units.Quantity` or float
            Smoothing width given as a quantity or float. If a float is given, it is
            interpreted as the smoothing width in pixels. If an (angular) quantity
            is given, it is converted to pixels using `SkyImage.wcs_pixel_scale()`.
        kwargs : dict
            Keyword arguments passed to `~scipy.ndimage.uniform_filter`
            ('box'), `~scipy.ndimage.gaussian_filter` ('gauss') or
            `~scipy.ndimage.convolve` ('disk').

        Returns
        -------
        image : `SkyImage`
            Smoothed image (a copy, the original object is unchanged).
        """
        from scipy.ndimage import gaussian_filter, uniform_filter
        from scipy.ndimage import convolve
        from scipy.stats import gmean

        image = self.copy()

        if isinstance(radius, u.Quantity):
            # use the geometric mean if the x and y pixel scales differ
            radius = gmean((radius / self.wcs_pixel_scale()).value)

        if kernel == 'gauss':
            width = radius / 2.
            image.data = gaussian_filter(self.data, width, **kwargs)
        elif kernel == 'disk':
            width = 2 * radius + 1
            disk = Tophat2DKernel(width)
            disk.normalize('integral')
            image.data = convolve(self.data, disk.array, **kwargs)
        elif kernel == 'box':
            width = 2 * radius + 1
            image.data = uniform_filter(self.data, width, **kwargs)
        else:
            raise ValueError('Invalid option kernel = {}'.format(kernel))

        return image
Example 14
    def make_significance_cube(self, radius):
        """Make the significance cube from the counts and bkg cubes.

        Parameters
        ----------
        radius : float
            Disk radius in pixels.
        """
        disk = Tophat2DKernel(radius)
        disk.normalize('peak')
        list_kernel = [disk.array] * (len(self.significance_cube.energies()))
        counts = self.counts_cube.convolve(list_kernel)
        bkg = self.bkg_cube.convolve(list_kernel)
        self.significance_cube.data = significance(counts.data, bkg.data)
Example 15
    def smooth(self, radius, kernel='gauss', **kwargs):
        """
        Smooth the image (works on a 2D image and returns a copy).

        The definition of the smoothing parameter radius is equivalent to the
        one that is used in ds9 (see `ds9 smoothing <http://ds9.si.edu/doc/ref/how.html#Smoothing>`_).

        Parameters
        ----------
        radius : `~astropy.units.Quantity` or float
            Smoothing width given as a quantity or float. If a float is given, it is
            interpreted as the smoothing width in pixels. If an (angular) quantity
            is given, it is converted to pixels using `geom.wcs.wcs.cdelt`.
        kernel : {'gauss', 'disk', 'box'}
            Kernel shape
        kwargs : dict
            Keyword arguments passed to `~scipy.ndimage.uniform_filter`
            ('box'), `~scipy.ndimage.gaussian_filter` ('gauss') or
            `~scipy.ndimage.convolve` ('disk').

        Returns
        -------
        image : `WcsNDMap`
            Smoothed image (a copy, the original object is unchanged).
        """
        from scipy.ndimage import gaussian_filter, uniform_filter, convolve

        if not self.geom.is_image:
            raise ValueError('Only supported on 2D maps')

        if isinstance(radius, Quantity):
            radius = (radius.to('deg') / self.geom.pixel_scales.mean()).value

        if kernel == 'gauss':
            width = radius / 2.
            data = gaussian_filter(self.data, width, **kwargs)
        elif kernel == 'disk':
            width = 2 * radius + 1
            disk = Tophat2DKernel(width)
            disk.normalize('integral')
            data = convolve(self.data, disk.array, **kwargs)
        elif kernel == 'box':
            width = 2 * radius + 1
            data = uniform_filter(self.data, width, **kwargs)
        else:
            raise ValueError('Invalid option kernel = {}'.format(kernel))

        image = copy.copy(self)
        image.data = data
        return image
Example 16
def plot_residual_distribution(dataset, obs_id):
    # plot residual significance distribution
    model = dataset.models[1]

    if model.tag == "SkyDiffuseCube":
        log.info(f"SkyDiffuseCube: no spectral model to plot")
    else:
        tophat_2D_kernel = Tophat2DKernel(5)
        l_m = lima.compute_lima_image(
            dataset.counts.sum_over_axes(keepdims=False),
            dataset.npred().sum_over_axes(keepdims=False),
            tophat_2D_kernel,
        )
        sig_resid = l_m["significance"].data[np.isfinite(
            l_m["significance"].data)]

        #    resid = dataset.residuals()
        #    sig_resid = resid.data[np.isfinite(resid.data)]

        plt.hist(
            sig_resid,
            density=True,
            alpha=0.5,
            color="red",
            bins=100,
        )

        mu, std = norm.fit(sig_resid)
        # replace with log.info()
        print("Fit results: mu = {:.2f}, std = {:.2f}".format(mu, std))
        x = np.linspace(-8, 8, 50)
        p = norm.pdf(x, mu, std)
        plt.plot(
            x,
            p,
            lw=2,
            color="black",
            label="Fit results: mu = {:.2f}, std = {:.2f}".format(mu, std),
        )
        plt.legend()
        plt.xlabel("Significance")
        plt.yscale("log")
        plt.ylim(1e-5, 1)
        xmin, xmax = np.min(sig_resid), np.max(sig_resid)
        plt.xlim(xmin, xmax)

        obs_id = int(obs_id)
        filename = f"results/models/{model.name}/plots/residuals-distribution/residuals-distribution_{obs_id:04d}.png"
        save_figure(filename)
Example 17
def kappag_map_bin(N2d, mask, fraction, z_l1, z_l2, z_s, cosmo, smooth_kernal,
                   smooth_scale):
    """
    Calculate kappa_g map for a lens redshift bin from z_l1 to z_l2 
    and a source redshift of z_s.
    """

    c_light = 3.0e5  # speed of light in km/s
    if smooth_kernal == 'box':
        kernel = Box2DKernel(smooth_scale)
    if smooth_kernal == 'tophat':
        kernel = Tophat2DKernel(smooth_scale / 2)

    # make 2D galaxy over-density maps ########
    if fraction is None:
        fraction = mask.copy()

    N2d = N2d * mask

    area_fraction = np.sum(fraction[mask == 1]) / len(fraction[mask == 1])

    dN2d = N2d * 0.0
    ave = np.mean(N2d[mask == 1]) / area_fraction
    dN2d[mask == 1] = (N2d[mask == 1] -
                       (ave * fraction[mask == 1])) / (ave *
                                                       fraction[mask == 1])
    # print(ave)

    # make 2D kappa_g maps ########

    zl1_cd = cd.comoving_distance(z_l1, **cosmo)  # Mpc
    zl2_cd = cd.comoving_distance(z_l2, **cosmo)  # Mpc
    zs_cd = cd.comoving_distance(z_s, **cosmo)  # Mpc
    delta_cd = zl2_cd - zl1_cd
    const = ((100. * cosmo['h'])**2 *
             cosmo['omega_M_0']) * (3 / 2.) * (1 / c_light**2)

    integ = lens_weight(np.array([z_l1, z_l2]), z_s, cosmo)[0]

    temp_dN = dN2d * 1.0
    temp_dN[mask == 0] = np.nan
    kg = const * delta_cd * integ * convolution.convolve_fft(
        temp_dN,
        kernel,
        normalize_kernel=True,
        ignore_edge_zeros=True,
        interpolate_nan=True)
    kg[mask == 0] = 0
    return kg
Example 18
    def smooth(self, radius, kernel="gauss", **kwargs):
        """
        Smooth the image (works on a 2D image and returns a copy).

        The definition of the smoothing parameter radius is equivalent to the
        one that is used in ds9 (see `ds9 smoothing <http://ds9.si.edu/doc/ref/how.html#Smoothing>`_).

        Parameters
        ----------
        radius : `~astropy.units.Quantity` or float
            Smoothing width given as a quantity or float. If a float is given, it is
            interpreted as the smoothing width in pixels. If an (angular) quantity
            is given, it is converted to pixels using ``geom.wcs.wcs.cdelt``.
        kernel : {'gauss', 'disk', 'box'}
            Kernel shape
        kwargs : dict
            Keyword arguments passed to `~scipy.ndimage.uniform_filter`
            ('box'), `~scipy.ndimage.gaussian_filter` ('gauss') or
            `~scipy.ndimage.convolve` ('disk').

        Returns
        -------
        image : `WcsNDMap`
            Smoothed image (a copy, the original object is unchanged).
        """
        from scipy.ndimage import gaussian_filter, uniform_filter, convolve

        if isinstance(radius, u.Quantity):
            radius = (radius.to("deg") / self.geom.pixel_scales.mean()).value

        smoothed_data = np.empty_like(self.data)

        for img, idx in self.iter_by_image():
            if kernel == "gauss":
                width = radius / 2.
                data = gaussian_filter(img, width, **kwargs)
            elif kernel == "disk":
                width = 2 * radius + 1
                disk = Tophat2DKernel(width)
                disk.normalize("integral")
                data = convolve(img, disk.array, **kwargs)
            elif kernel == "box":
                width = 2 * radius + 1
                data = uniform_filter(img, width, **kwargs)
            else:
                raise ValueError("Invalid kernel: {!r}".format(kernel))
            smoothed_data[idx] = data

        return self._init_copy(data=smoothed_data)
Example 19
def isophotal(level):
    #data_re=rebin(data[0].data,(100,100),)
    global data
    tophat_kernel = Tophat2DKernel(5)
    data_re = convolve(data, tophat_kernel)
    #data_re = data
    #print(level)
    pix_ind = where(floor(log10(data_re) * 50) == floor(log10(level) * 50))
    #print(pix_ind)
    X = array(pix_ind[1])
    Y = array(pix_ind[0])
    #plt.plot(X,Y,'.')
    ipars = fit_ellipse(X, Y)
    ell = plot_ellipse(plt.gca(), ipars)
    plt.draw()
Example 20
def test_compute_lima_map():
    """
    Test Li&Ma map against TS map for Tophat kernel
    """
    data = load_poisson_stats_image(extra_info=True)

    kernel = Tophat2DKernel(5)
    result_lima = compute_lima_map(data['counts'], data['background'], kernel,
                                   data['exposure'])
    kernel.normalize('integral')
    result_ts = compute_ts_map(data['counts'], data['background'],
                               data['exposure'], kernel)

    assert_allclose(result_ts.sqrt_ts, result_lima.significance, atol=1E-3)
    assert_allclose(result_ts.amplitude, result_lima.flux, atol=3E-12)
Example 21
    def significance_image(self, radius):
        """Make the significance image from the counts and bkg images.

        Parameters
        ----------
        radius : float
            Disk radius in pixels.
        """
        image = SkyImage.empty_like(self.empty_image)
        disk = Tophat2DKernel(radius)
        disk.normalize('peak')
        counts = self.images["counts"].convolve(disk.array)
        bkg = self.images["bkg"].convolve(disk.array)
        image.data = significance(counts.data, bkg.data)
        self.images["significance"] = image
Example 22
    def _exposure_on_cube(self, exposure_on, kernels):
        """Compute on exposure cube.

        Calculated by convolving the on exposure with a tophat
        of radius theta, and stacking all images along the third dimension.
        """
        from scipy.ndimage import convolve

        scale = exposure_on.geom.pixel_scales[0].to('deg')
        theta = self.parameters['theta'] * scale

        tophat = Tophat2DKernel(theta.value)
        tophat.normalize('peak')
        exposure_on = convolve(exposure_on.data, tophat.array)
        exposure_on_cube = np.repeat(exposure_on[:, :, np.newaxis], len(kernels), axis=2)
        return exposure_on_cube
Example 23
    def _exposure_on_cube(self, exposure_on, kernels):
        """Compute on exposure cube.

        Calculated by convolving the on exposure with a tophat
        of radius theta, and stacking all images along the third dimension.
        """
        scale = exposure_on.geom.pixel_scales[0].to("deg")
        theta = self.parameters["theta"] * scale

        tophat = Tophat2DKernel(theta.value)
        tophat.normalize("peak")
        exposure_on = exposure_on.convolve(tophat.array)
        exposure_on_cube = np.repeat(
            exposure_on.data[:, :, np.newaxis], len(kernels), axis=2
        )
        return exposure_on_cube
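The cube stacking in the _exposure_on_cube snippets is plain numpy; a minimal illustration of repeating a 2D image along a new third axis:

import numpy as np

image = np.arange(12.0).reshape(3, 4)
cube = np.repeat(image[:, :, np.newaxis], 5, axis=2)
print(cube.shape)                        # (3, 4, 5): the same image stacked 5 times
print(np.allclose(cube[..., 0], image))  # True: each plane is a copy of the input image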
Example 24
    def smooth(self, width, kernel="gauss", **kwargs):
        """
        Smooth the image (works on a 2D image and returns a copy).


        Parameters
        ----------
        width : `~astropy.units.Quantity` or float
            Smoothing width given as a quantity or float. If a float is given, it is
            interpreted as the smoothing width in pixels. If an (angular) quantity
            is given, it is converted to pixels using ``geom.wcs.wcs.cdelt``.
            It corresponds to the standard deviation of a Gaussian kernel,
            the radius of a disk kernel, and the side length of a box kernel.
        kernel : {'gauss', 'disk', 'box'}
            Kernel shape
        kwargs : dict
            Keyword arguments passed to `~scipy.ndimage.uniform_filter`
            ('box'), `~scipy.ndimage.gaussian_filter` ('gauss') or
            `~scipy.ndimage.convolve` ('disk').

        Returns
        -------
        image : `WcsNDMap`
            Smoothed image (a copy, the original object is unchanged).
        """
        from scipy.ndimage import gaussian_filter, uniform_filter, convolve

        if isinstance(width, u.Quantity):
            width = (width.to("deg") / self.geom.pixel_scales.mean()).value

        smoothed_data = np.empty_like(self.data)

        for img, idx in self.iter_by_image():
            if kernel == "gauss":
                data = gaussian_filter(img, width, **kwargs)
            elif kernel == "disk":
                disk = Tophat2DKernel(width)
                disk.normalize("integral")
                data = convolve(img, disk.array, **kwargs)
            elif kernel == "box":
                data = uniform_filter(img, width, **kwargs)
            else:
                raise ValueError("Invalid kernel: {!r}".format(kernel))
            smoothed_data[idx] = data

        return self._init_copy(data=smoothed_data)
Example 25
    def smooth(self, width, kernel="gauss", **kwargs):
        """Smooth the map.

        Iterates over 2D image planes, processing one at a time.

        Parameters
        ----------
        width : `~astropy.units.Quantity`, str or float
            Smoothing width given as a quantity, string or float. If a float is given, it is
            interpreted as the smoothing width in pixels. If an (angular) quantity
            is given, it is converted to pixels using ``geom.wcs.wcs.cdelt``.
            It corresponds to the standard deviation of a Gaussian kernel,
            the radius of a disk kernel, and the side length of a box kernel.
        kernel : {'gauss', 'disk', 'box'}
            Kernel shape
        kwargs : dict
            Keyword arguments passed to `~scipy.ndimage.uniform_filter`
            ('box'), `~scipy.ndimage.gaussian_filter` ('gauss') or
            `~scipy.ndimage.convolve` ('disk').

        Returns
        -------
        image : `WcsNDMap`
            Smoothed image (a copy, the original object is unchanged).
        """
        if isinstance(width, (u.Quantity, str)):
            width = u.Quantity(width) / self.geom.pixel_scales.mean()
            width = width.to_value("")

        smoothed_data = np.empty(self.data.shape, dtype=float)

        for img, idx in self.iter_by_image():
            img = img.astype(float)
            if kernel == "gauss":
                data = scipy.ndimage.gaussian_filter(img, width, **kwargs)
            elif kernel == "disk":
                disk = Tophat2DKernel(width)
                disk.normalize("integral")
                data = scipy.ndimage.convolve(img, disk.array, **kwargs)
            elif kernel == "box":
                data = scipy.ndimage.uniform_filter(img, width, **kwargs)
            else:
                raise ValueError(f"Invalid kernel: {kernel!r}")
            smoothed_data[idx] = data

        return self._init_copy(data=smoothed_data)
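A hedged usage sketch for the smooth() method above. It assumes a recent gammapy where Map.create and WcsNDMap.smooth with this signature are available; the pixel size and smoothing radius are illustrative only.

import astropy.units as u
from gammapy.maps import Map

m = Map.create(npix=(101, 101), binsz=0.02)            # 2D map with 0.02 deg pixels
m.data[50, 50] = 1.0                                    # a single bright pixel
smoothed = m.smooth(width=0.1 * u.deg, kernel='disk')
print(smoothed.data.sum())                              # ~1.0: the 'integral'-normalized disk preserves the total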
Example 26
    def make_cubes(self, dataset):
        """Make acceptance, off acceptance, off counts cubes

        Parameters
        ----------
        dataset : `~gammapy.cube.fit.MapDataset`
            Input map dataset.

        Returns
        -------
        cubes : dict of `~gammapy.maps.WcsNDMap`
            Dictionary containing `counts_off`, `acceptance` and `acceptance_off` cubes.
        """

        counts = dataset.counts
        background = dataset.background_model.map
        kernels = self.kernels(counts)

        if self.exclusion_mask is not None:
            # reproject exclusion mask
            coords = counts.geom.get_coord()
            data = self.exclusion_mask.get_by_coord(coords)
            exclusion = Map.from_geom(geom=counts.geom, data=data)
        else:
            data = np.ones(counts.geom.data_shape, dtype=bool)
            exclusion = Map.from_geom(geom=counts.geom, data=data)

        cubes = {}
        cubes["counts_off"] = scale_cube(
            (counts.data * exclusion.data)[0, Ellipsis], kernels
        )
        cubes["acceptance_off"] = scale_cube(
            (background.data * exclusion.data)[0, Ellipsis], kernels
        )

        scale = background.geom.pixel_scales[0].to("deg")
        theta = self.theta * scale
        tophat = Tophat2DKernel(theta.value)
        tophat.normalize("peak")
        acceptance = background.convolve(tophat.array)
        acceptance_data = acceptance.data[0, Ellipsis]
        cubes["acceptance"] = np.repeat(
            acceptance_data[Ellipsis, np.newaxis], len(kernels), axis=2
        )

        return cubes
Example 27
def tophat2D_smooth(data, scale=45, mask=False):
    dims = _get_dims(data)

    tophat2d_kernel = Tophat2DKernel(scale).array
    sc_convolve = lambda data: convolve(data, tophat2d_kernel, mode='same')

    if mask:
        data_masked = data.where(data[mask_vars[dims]])
    else:
        data_masked = data.fillna(0.)

    return xr.apply_ufunc(sc_convolve,
                          data_masked,
                          vectorize=True,
                          dask='parallelized',
                          input_core_dims=[dims],
                          output_core_dims=[dims],
                          output_dtypes=[data.dtype])
Example 28
def test_compute_lima_image():
    """
    Test Li&Ma image against TS image for Tophat kernel
    """
    filename = '$GAMMAPY_EXTRA/test_datasets/unbundled/poisson_stats_image/input_all.fits.gz'
    images = SkyImageList.read(filename)

    kernel = Tophat2DKernel(5)
    result_lima = compute_lima_image(
        images['counts'], images['background'], kernel, images['exposure'],
    )
    kernel.normalize('integral')
    result_ts = compute_ts_image(
        images['counts'], images['background'], images['exposure'], kernel,
    )

    assert_allclose(result_ts['sqrt_ts'], result_lima['significance'], atol=1E-3)
    assert_allclose(result_ts['amplitude'], result_lima['flux'], atol=3E-12)
Example 29
    def _exposure_on_cube(self, images, kernels):
        """
        Compute on exposure cube, by convolving the on exposure with a tophat
        of radius theta, and stacking all images along the third dimension.
        """
        from scipy.ndimage import convolve

        exposure_on = images['exposure_on']
        scale = exposure_on.wcs_pixel_scale()[0]
        theta = self.parameters['theta'] * scale

        tophat = Tophat2DKernel(theta.value)
        tophat.normalize('peak')
        exposure_on = convolve(exposure_on, tophat.array)
        exposure_on_cube = np.repeat(exposure_on[:, :, np.newaxis],
                                     len(kernels),
                                     axis=2)
        return exposure_on_cube
Example 30
def test_compute_lima_on_off_map():
    """
    Test Li&Ma map with snippet from the H.E.S.S. survey data.
    """
    filename = gammapy_extra.filename('test_datasets/unbundled/hess/survey/'
                                      'hess_survey_snippet.fits.gz')
    maps = SkyImageCollection.read(filename)

    kernel = Tophat2DKernel(5)

    result_lima = compute_lima_on_off_map(maps.on.data, maps.off.data, maps.onexposure.data,
                                          maps.offexposure.data, kernel)

    # reproduce safe significance threshold from HESS software
    result_lima.significance.data[result_lima.n_on.data < 5] = 0

    # Set boundary to NaN in reference image
    maps.significance.data[np.isnan(result_lima.significance)] = np.nan
    assert_allclose(result_lima.significance, maps.significance, atol=1E-5)