Example #1
def test_combine(tmpdir):
    """Test ligo-skymap-combine."""
    fn1 = str(tmpdir / 'skymap1.fits.gz')
    fn2 = str(tmpdir / 'skymap2.fits.gz')
    fn3 = str(tmpdir / 'joint_skymap.fits.gz')

    # generate a hemisphere of constant probability
    nside1 = 32
    npix1 = ah.nside_to_npix(nside1)
    m1 = np.zeros(npix1)
    disc_idx = hp.query_disc(nside1, (1, 0, 0), np.pi / 2)
    m1[disc_idx] = 1
    m1 /= m1.sum()
    hp.write_map(fn1,
                 m1,
                 column_names=['PROBABILITY'],
                 extra_header=[('INSTRUME', 'X1')])

    # generate another hemisphere of constant probability
    # but with higher resolution and rotated 90 degrees
    nside2 = 64
    npix2 = ah.nside_to_npix(nside2)
    m2 = np.zeros(npix2)
    disc_idx = hp.query_disc(nside2, (0, 1, 0), np.pi / 2)
    m2[disc_idx] = 1
    m2 /= m2.sum()
    hp.write_map(fn2,
                 m2,
                 column_names=['PROBABILITY'],
                 extra_header=[('INSTRUME', 'Y1')])

    run_entry_point('ligo-skymap-combine', fn1, fn2, fn3)

    m3 = hp.read_map(fn3, nest=True)
    npix3 = len(m3)
    nside3 = ah.npix_to_nside(npix3)
    pix_area3 = ah.nside_to_pixel_area(nside3).to_value(u.sr)

    # resolution must match the highest original resolution
    assert npix3 == npix2
    # probability must be normalized to 1
    assert m3.sum() == pytest.approx(1)
    # support must be ¼ of the sphere
    tolerance = 10 * ah.nside_to_pixel_area(nside1).to_value(u.sr)
    assert sum(m3 > 0) * pix_area3 == pytest.approx(np.pi, abs=tolerance)

    # generate a BAYESTAR-like map with mock distance information
    d_mu = np.zeros_like(m1)
    d_sigma = np.ones_like(m1)
    d_norm = np.ones_like(m1)
    io.write_sky_map(fn1, [m1, d_mu, d_sigma, d_norm])

    run_entry_point('ligo-skymap-combine', fn1, fn2, fn3)

    m3, meta3 = io.read_sky_map(fn3, nest=True, distances=True)

    # check that marginal distance moments match what was simulated
    mean, std, _ = distance.parameters_to_moments(d_mu[0], d_sigma[0])
    assert meta3['distmean'] == pytest.approx(mean)
    assert meta3['diststd'] == pytest.approx(std)
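
# A minimal follow-up sketch (not part of the original test): the same
# nside_to_pixel_area pattern gives the area of a credible region for any
# flat HEALPix probability map. The helper name below is hypothetical.
import astropy.units as u
import astropy_healpix as ah
import numpy as np


def credible_area_deg2(m, level=0.9):
    """Area (deg2) of the smallest region containing `level` of the probability."""
    nside = ah.npix_to_nside(len(m))
    pix_area = ah.nside_to_pixel_area(nside).to_value(u.deg**2)
    order = np.argsort(m)[::-1]           # most probable pixels first
    cum = np.cumsum(m[order])             # cumulative probability, greedy order
    return (np.searchsorted(cum, level) + 1) * pix_area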
Example #2
def test_from_valued_healpix_cells_bayestar():
    from astropy.io import fits
    fits_image_filename = './resources/bayestar.multiorder.fits'

    with fits.open(fits_image_filename) as hdul:
        hdul.info()
        hdul[1].columns

        data = hdul[1].data

    uniq = data['UNIQ']
    probdensity = data['PROBDENSITY']

    import astropy_healpix as ah
    import astropy.units as u

    level, ipix = ah.uniq_to_level_ipix(uniq)
    area = ah.nside_to_pixel_area(ah.level_to_nside(level)).to_value(
        u.steradian)

    prob = probdensity * area

    cumul_to = np.linspace(0.01, 2.0, num=10)

    for b in cumul_to:
        MOC.from_valued_healpix_cells(uniq,
                                      prob,
                                      12,
                                      cumul_from=0.0,
                                      cumul_to=b)
Example #3
def main(args=None):
    p = parser()
    opts = p.parse_args(args)

    import astropy_healpix as ah
    import astropy.units as u

    try:
        from mocpy import MOC
    except ImportError:
        p.error('This command-line tool requires mocpy >= 0.8.2. '
                'Please install it by running "pip install mocpy".')

    from ..io import read_sky_map

    # Read multi-order sky map
    skymap = read_sky_map(opts.input.name, moc=True)

    uniq = skymap['UNIQ']
    probdensity = skymap['PROBDENSITY']

    level, ipix = ah.uniq_to_level_ipix(uniq)
    area = ah.nside_to_pixel_area(
        ah.level_to_nside(level)).to_value(u.steradian)

    prob = probdensity * area

    # Create MOC
    contour_decimal = opts.contour / 100
    moc = MOC.from_valued_healpix_cells(
        uniq, prob, cumul_from=0.0, cumul_to=contour_decimal)

    # Write MOC
    moc.write(opts.output, format='fits', overwrite=True)
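
# Hedged usage sketch (file name is hypothetical): read back a MOC written by
# main() above and report the solid angle of the contour. MOC.from_fits and
# MOC.sky_fraction are assumed to be available in mocpy >= 0.8.2.
import numpy as np
import astropy.units as u
from mocpy import MOC

contour_moc = MOC.from_fits('bayestar.90percent.moc.fits')
solid_angle = contour_moc.sky_fraction * 4 * np.pi * u.sr
print(solid_angle.to(u.deg**2))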
Example #4
    def flat_bitmap(self):
        """Return flattened HEALPix representation."""
        m = np.empty(ah.nside_to_npix(ah.level_to_nside(self.order)))
        for nside, full_nside, ipix, ipix0, ipix1, samples in self.visit():
            pixarea = ah.nside_to_pixel_area(nside).to_value(u.sr)
            m[ipix0:ipix1] = len(samples) / pixarea
        return m
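
# Hedged check: integrating the flattened density returned by flat_bitmap()
# over the (equal) full-resolution pixel areas recovers the number of samples
# stored in the tree; `tree` stands for an instance of the class above.
import astropy.units as u
import astropy_healpix as ah


def total_samples(tree):
    """Integrate the flat density map back to a sample count."""
    m = tree.flat_bitmap()
    pix_area = ah.nside_to_pixel_area(ah.npix_to_nside(len(m))).to_value(u.sr)
    return (m * pix_area).sum()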
Example #5
def input_skymap(order1, d_order, fraction):
    """Construct a test multi-resolution sky map, with values that are
    proportional to the NESTED pixel index.

    To make the test more interesting by mixing together multiple resolutions,
    part of the sky map is refined to a higher order.

    Parameters
    ----------
    order1 : int
        The HEALPix resolution order.
    d_order : int
        The increase in order for part of the sky map.
    fraction : float
        The fraction of the original pixels to refine.

    """
    order2 = order1 + d_order
    npix1 = ah.nside_to_npix(ah.level_to_nside(order1))
    npix2 = ah.nside_to_npix(ah.level_to_nside(order2))
    ipix1 = np.arange(npix1)
    ipix2 = np.arange(npix2)

    # Create a random sky map.
    area = ah.nside_to_pixel_area(ah.level_to_nside(order1)).to_value(u.sr)
    probdensity = np.random.uniform(0, 1, npix1)
    prob = probdensity * area
    normalization = prob.sum()
    prob /= normalization
    probdensity /= normalization
    distmean = np.random.uniform(100, 110, npix1)
    diststd = np.random.uniform(0, 1 / np.sqrt(3) - 0.1, npix1) * distmean
    distmu, distsigma, distnorm = moments_to_parameters(distmean, diststd)
    assert np.all(np.isfinite(distmu))

    data1 = table.Table({
        'UNIQ': moc.nest2uniq(order1, ipix1),
        'PROBDENSITY': probdensity,
        'DISTMU': distmu,
        'DISTSIGMA': distsigma,
        'DISTNORM': distnorm
    })

    # Add some upsampled pixels.
    data2 = table.Table(np.repeat(data1, npix2 // npix1))
    data2['UNIQ'] = moc.nest2uniq(order2, ipix2)
    n = int(npix1 * (1 - fraction))
    result = table.vstack((data1[:n], data2[n * npix2 // npix1:]))

    # Add marginal distance mean and standard deviation.
    rbar = (prob * distmean).sum()
    r2bar = (prob * (np.square(diststd) + np.square(distmean))).sum()
    result.meta['distmean'] = rbar
    result.meta['diststd'] = np.sqrt(r2bar - np.square(rbar))

    return result
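
# Hedged usage sketch: the multi-order map built above integrates to unity,
# which can be checked with the same UNIQ -> pixel-area pattern used elsewhere
# on this page. input_skymap is assumed callable as the plain function above.
import astropy.units as u
import astropy_healpix as ah
import numpy as np

skymap = input_skymap(order1=3, d_order=2, fraction=0.5)
level, _ = ah.uniq_to_level_ipix(skymap['UNIQ'])
cell_area = ah.nside_to_pixel_area(ah.level_to_nside(level)).to_value(u.sr)
np.testing.assert_allclose((skymap['PROBDENSITY'] * cell_area).sum(), 1)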
Example #6
    def _build_moc_map(self):

        pts = np.column_stack((self._grb_phi, self._grb_theta))

        self._kde_map = Clustered2DSkyKDE(pts, jobs=12)

        data = self._kde_map.as_healpix(top_nside=self._npix)

        self._uniq = data["UNIQ"]
        self._prob_density = data["PROBDENSITY"]

        level, ipix = ah.uniq_to_level_ipix(self._uniq)
        area = ah.nside_to_pixel_area(ah.level_to_nside(level)).to_value(
            u.steradian)
        self._prob = self._prob_density * area
Example #7
    def as_healpix(self, top_nside=16):
        """Return a HEALPix multi-order map of the posterior density."""
        post, nside, ipix = zip(*self._bayestar_adaptive_grid(
            top_nside=top_nside))
        post = np.asarray(list(post))
        nside = np.asarray(list(nside))
        ipix = np.asarray(list(ipix))

        # Make sure that sky map is normalized (it should be already)
        post /= np.sum(post * ah.nside_to_pixel_area(nside).to_value(u.sr))

        # Convert from NESTED to UNIQ pixel indices
        order = np.log2(nside).astype(int)
        uniq = moc.nest2uniq(order.astype(np.int8), ipix)

        # Done!
        return Table([uniq, post], names=['UNIQ', 'PROBDENSITY'], copy=False)
Example #8
    def unseen_pixels(cls, map_name, hires=False):
        """
        Return the indices of UNSEEN (NaN) HEALPix pixels in ``map_name``.

        Parameters
        ----------
        map_name : ``str``
            Name of the map to be plotted. Use ``show_maps`` method
            to see a list of all available maps.
        hires : ``boolean``, optional
            Use high resolution map. If the map is not available locally,
            it is downloaded. Defaults to ``False``.
        """
        hpmap, nside, _ = cls._load_map(map_name, hires)
        unseen_pixels = np.where(np.isnan(hpmap))[0]
        npixels = len(unseen_pixels)

        if npixels:
            bad_area = npixels * nside_to_pixel_area(nside).to(u.deg**2)
            print("{} contains {} UNSEEN pixels ({})".format(
                map_name, npixels, bad_area))

        return unseen_pixels
Example #9
def main(args=None):
    opts = parser().parse_args(args)

    # Late imports

    import numpy as np
    import matplotlib.pyplot as plt
    from matplotlib import rcParams
    from ..io import fits
    from .. import plot
    from .. import postprocess
    import astropy_healpix as ah
    from astropy.coordinates import SkyCoord
    from astropy.time import Time
    from astropy import units as u

    skymap, metadata = fits.read_sky_map(opts.input.name, nest=None)
    nside = ah.npix_to_nside(len(skymap))

    # Convert sky map from probability to probability per square degree.
    deg2perpix = ah.nside_to_pixel_area(nside).to_value(u.deg**2)
    probperdeg2 = skymap / deg2perpix

    axes_args = {}
    if opts.geo:
        axes_args['projection'] = 'geo'
        obstime = Time(metadata['gps_time'], format='gps').utc.isot
        axes_args['obstime'] = obstime
    else:
        axes_args['projection'] = 'astro'
    axes_args['projection'] += ' ' + opts.projection
    if opts.projection_center is not None:
        axes_args['center'] = SkyCoord(opts.projection_center)
    if opts.zoom_radius is not None:
        axes_args['radius'] = opts.zoom_radius
    ax = plt.axes(**axes_args)
    ax.grid()

    # Plot sky map.
    vmax = probperdeg2.max()
    img = ax.imshow_hpx((probperdeg2, 'ICRS'),
                        nested=metadata['nest'],
                        vmin=0.,
                        vmax=vmax)

    # Add colorbar.
    if opts.colorbar:
        cb = plot.colorbar(img)
        cb.set_label(r'prob. per deg$^2$')

    # Add contours.
    if opts.contour:
        cls = 100 * postprocess.find_greedy_credible_levels(skymap)
        cs = ax.contour_hpx((cls, 'ICRS'),
                            nested=metadata['nest'],
                            colors='k',
                            linewidths=0.5,
                            levels=opts.contour)
        fmt = r'%g\%%' if rcParams['text.usetex'] else '%g%%'
        plt.clabel(cs, fmt=fmt, fontsize=6, inline=True)

    # Add continents.
    if opts.geo:
        plt.plot(*plot.coastlines(),
                 color='0.5',
                 linewidth=0.5,
                 transform=ax.get_transform('world'))

    radecs = opts.radec
    if opts.inj_database:
        query = '''SELECT DISTINCT longitude, latitude FROM sim_inspiral AS si
                   INNER JOIN coinc_event_map AS cm1
                   ON (si.simulation_id = cm1.event_id)
                   INNER JOIN coinc_event_map AS cm2
                   ON (cm1.coinc_event_id = cm2.coinc_event_id)
                   WHERE cm2.event_id = ?
                   AND cm1.table_name = 'sim_inspiral'
                   AND cm2.table_name = 'coinc_event'
                   '''
        (ra,
         dec), = opts.inj_database.execute(query,
                                           (metadata['objid'], )).fetchall()
        radecs.append(np.rad2deg([ra, dec]).tolist())

    # Add markers (e.g., for injections or external triggers).
    for ra, dec in radecs:
        ax.plot_coord(SkyCoord(ra, dec, unit='deg'),
                      '*',
                      markerfacecolor='white',
                      markeredgecolor='black',
                      markersize=10)

    # Add a white outline to all text to make it stand out from the background.
    plot.outline_text(ax)

    if opts.annotate:
        text = []
        try:
            objid = metadata['objid']
        except KeyError:
            pass
        else:
            text.append('event ID: {}'.format(objid))
        if opts.contour:
            pp = np.round(opts.contour).astype(int)
            ii = np.round(
                np.searchsorted(np.sort(cls), opts.contour) *
                deg2perpix).astype(int)
            for i, p in zip(ii, pp):
                # FIXME: use Unicode symbol instead of TeX '$^2$'
                # because of broken fonts on Scientific Linux 7.
                text.append('{:d}% area: {:,d} deg²'.format(p, i))
        ax.text(1, 1, '\n'.join(text), transform=ax.transAxes, ha='right')

    # Show or save output.
    opts.output()
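
# Hedged usage sketch: the option names referenced above (--annotate,
# --contour, --colorbar) and the positional input suggest calling the main()
# defined above directly with a hypothetical file name:
main(['skymap.fits.gz', '--annotate', '--contour', '50', '90', '--colorbar'])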
Example #10
from astropy.io import fits

fits_image_filename = './../../resources/bayestar.multiorder.fits'

with fits.open(fits_image_filename) as hdul:
    hdul.info()
    hdul[1].columns

    data = hdul[1].data

uniq = data['UNIQ']
probdensity = data['PROBDENSITY']

import astropy_healpix as ah
import astropy.units as u

level, ipix = ah.uniq_to_level_ipix(uniq)
area = ah.nside_to_pixel_area(ah.level_to_nside(level)).to_value(u.steradian)

prob = probdensity * area

from mocpy import MOC

import numpy as np
cumul_to = np.linspace(0.5, 0.9, 5)[::-1]
colors = ['blue', 'green', 'yellow', 'orange', 'red']
mocs = [MOC.from_valued_healpix_cells(uniq, prob, cumul_to=c) for c in cumul_to]


from mocpy import World2ScreenMPL
from astropy.coordinates import Angle, SkyCoord
import astropy.units as u
# Plot the MOC using matplotlib
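import matplotlib.pyplot as plt

# A hedged completion of the truncated plotting step above, following the
# World2ScreenMPL pattern from the mocpy documentation; the field of view,
# center, and projection are illustrative choices, not values taken from the
# original snippet.
fig = plt.figure(figsize=(10, 10))
with World2ScreenMPL(fig,
                     fov=320 * u.deg,
                     center=SkyCoord(0, 0, unit='deg', frame='icrs'),
                     coordsys='icrs',
                     rotation=Angle(0, u.degree),
                     projection='AIT') as wcs:
    ax = fig.add_subplot(1, 1, 1, projection=wcs)
    for cl_moc, color in zip(mocs, colors):
        # Fill each credible-region MOC and outline its border.
        cl_moc.fill(ax=ax, wcs=wcs, alpha=0.5, fill=True, color=color)
        cl_moc.border(ax=ax, wcs=wcs, color=color)
plt.show()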
Example #11
def find_ellipse(prob, cl=90, projection='ARC', nest=False):
    """For a HEALPix map, find an ellipse that contains a given probability.

    The orientation is defined as the angle of the semimajor axis
    counterclockwise from west on the plane of the sky. If you think of the
    semimajor distance as the width of the ellipse, then the orientation is the
    clockwise rotation relative to the image x-axis. Equivalently, the
    orientation is the position angle of the semi-minor axis.

    These conventions match the definitions used in DS9 region files [1]_ and
    Aladin drawing commands [2]_.

    Parameters
    ----------
    prob : np.ndarray, astropy.table.Table
        The HEALPix probability map, either as a full rank explicit array
        or as a multi-order map.
    cl : float
        The desired credible level (default: 90).
    projection : str, optional
        The WCS projection (default: 'ARC', or zenithal equidistant).
        For a list of possible values, see the Astropy documentation [3]_.
    nest : bool
        HEALPix pixel ordering (default: False, or ring ordering).

    Returns
    -------
    ra : float
        The ellipse center right ascension in degrees.
    dec : float
        The ellipse center declination in degrees.
    a : float
        The length of the semimajor axis in degrees.
    b : float
        The length of the semiminor axis in degrees.
    pa : float
        The orientation of the ellipse axis on the plane of the sky in degrees.
    area : float
        The area of the ellipse in square degrees.

    Notes
    -----
    The center of the ellipse is the median a posteriori sky position. The
    length and orientation of the semi-major and semi-minor axes are measured
    as follows:

    1. The sky map is transformed to a WCS projection that may be specified by
       the caller. The default projection is ``ARC`` (zenithal equidistant), in
       which radial distances are proportional to the physical angular
       separation from the center point.
    2. A 1-sigma ellipse is estimated by calculating the covariance matrix in
       the projected image plane using three rounds of sigma clipping to reject
       distant outlier points.
    3. The 1-sigma ellipse is inflated until it encloses an integrated
       probability of ``cl`` (default: 90%).

    The function returns a tuple of the right ascension, declination,
    semi-major distance, semi-minor distance, and orientation angle, all in
    degrees.

    References
    ----------
    .. [1] http://ds9.si.edu/doc/ref/region.html
    .. [2] http://aladin.u-strasbg.fr/java/AladinScriptManual.gml#draw
    .. [3] http://docs.astropy.org/en/stable/wcs/index.html#supported-projections

    Examples
    --------
    **Example 1**

    First, we need some imports.

    >>> from astropy.io import fits
    >>> from astropy.utils.data import download_file
    >>> from astropy.wcs import WCS
    >>> import healpy as hp
    >>> from reproject import reproject_from_healpix
    >>> import subprocess

    Next, we download the BAYESTAR sky map for GW170817 from the
    LIGO Document Control Center.

    >>> url = 'https://dcc.ligo.org/public/0146/G1701985/001/bayestar.fits.gz'  # doctest: +SKIP
    >>> filename = download_file(url, cache=True, show_progress=False)  # doctest: +SKIP
    >>> _, healpix_hdu = fits.open(filename)  # doctest: +SKIP
    >>> prob = hp.read_map(healpix_hdu, verbose=False)  # doctest: +SKIP

    Then, we calculate the ellipse and write it to a DS9 region file.

    >>> ra, dec, a, b, pa, area = find_ellipse(prob)  # doctest: +SKIP
    >>> print(*np.around([ra, dec, a, b, pa, area], 5))  # doctest: +SKIP
    195.03732 -19.29358 8.66545 1.1793 63.61698 32.07665
    >>> s = 'fk5;ellipse({},{},{},{},{})'.format(ra, dec, a, b, pa)  # doctest: +SKIP
    >>> open('ds9.reg', 'w').write(s)  # doctest: +SKIP

    Then, we reproject a small patch of the HEALPix map, and save it to a file.

    >>> wcs = WCS()  # doctest: +SKIP
    >>> wcs.wcs.ctype = ['RA---ARC', 'DEC--ARC']  # doctest: +SKIP
    >>> wcs.wcs.crval = [ra, dec]  # doctest: +SKIP
    >>> wcs.wcs.crpix = [128, 128]  # doctest: +SKIP
    >>> wcs.wcs.cdelt = [-0.1, 0.1]  # doctest: +SKIP
    >>> img, _ = reproject_from_healpix(healpix_hdu, wcs, [256, 256])  # doctest: +SKIP
    >>> img_hdu = fits.ImageHDU(img, wcs.to_header())  # doctest: +SKIP
    >>> img_hdu.writeto('skymap.fits')  # doctest: +SKIP

    Now open the image and region file in DS9. You should find that the ellipse
    encloses the probability hot spot. You can load the sky map and region file
    from the command line:

    .. code-block:: sh

        $ ds9 skymap.fits -region ds9.reg

    Or you can do this manually:

        1. Open DS9.
        2. Open the sky map: select "File->Open..." and choose ``skymap.fits``
           from the dialog box.
        3. Open the region file: select "Regions->Load Regions..." and choose
           ``ds9.reg`` from the dialog box.

    Now open the image and region file in Aladin.

        1. Open Aladin.
        2. Open the sky map: select "File->Load Local File..." and choose
           ``skymap.fits`` from the dialog box.
        3. Open the region file: select "File->Load Local File..." and choose
           ``ds9.reg`` from the dialog box.

    You can also compare the original HEALPix file with the ellipse in Aladin:

        1. Open Aladin.
        2. Open the HEALPix file by pasting the URL from the top of this
           example in the Command field at the top of the window and hitting
           return, or by selecting "File->Load Direct URL...", pasting the URL,
           and clicking "Submit."
        3. Open the region file: select "File->Load Local File..." and choose
           ``ds9.reg`` from the dialog box.

    **Example 2**

    This example shows that we get approximately the same answer for GW170817
    if we read it in as a multi-order map.

    >>> from ..io import read_sky_map  # doctest: +SKIP
    >>> skymap_moc = read_sky_map(healpix_hdu, moc=True)  # doctest: +SKIP
    >>> ellipse = find_ellipse(skymap_moc)  # doctest: +SKIP
    >>> print(*np.around(ellipse, 5))  # doctest: +SKIP
    195.03709 -19.27589 8.67611 1.18167 63.60454 32.08015

    **Example 3**

    I'm not showing the `ra` or `pa` output from the examples below because
    the right ascension is arbitrary when dec=90° and the position angle is
    arbitrary when a=b; their arbitrary values may vary depending on your math
    library. Also, I add 0.0 to the outputs because on some platforms you tend
    to get values of dec or pa that get rounded to -0.0, which is within
    numerical precision but would break the doctests (see
    https://stackoverflow.com/questions/11010683).

    This is an example sky map that is uniform in sin(theta) out to a given
    radius in degrees. The 90% credible radius should be 0.9 * radius. (There
    will be deviations for small radius due to finite resolution.)

    >>> def make_uniform_in_sin_theta(radius, nside=512):
    ...     npix = ah.nside_to_npix(nside)
    ...     theta, phi = hp.pix2ang(nside, np.arange(npix))
    ...     theta_max = np.deg2rad(radius)
    ...     prob = np.where(theta <= theta_max, 1 / np.sin(theta), 0)
    ...     return prob / prob.sum()
    ...

    >>> prob = make_uniform_in_sin_theta(1)
    >>> ra, dec, a, b, pa, area = find_ellipse(prob)
    >>> dec, a, b, area  # doctest: +FLOAT_CMP
    (89.90862520480792, 0.8703361458208101, 0.8703357768874356, 2.3788811576269793)

    >>> prob = make_uniform_in_sin_theta(10)
    >>> ra, dec, a, b, pa, area = find_ellipse(prob)
    >>> dec, a, b, area  # doctest: +FLOAT_CMP
    (89.90827657529562, 9.024846562072119, 9.024842703023802, 255.11972196535515)

    >>> prob = make_uniform_in_sin_theta(120)
    >>> ra, dec, a, b, pa, area = find_ellipse(prob)
    >>> dec, a, b, area  # doctest: +FLOAT_CMP
    (90.0, 107.9745037610576, 107.97450376105758, 26988.70467497216)

    **Example 4**

    These are approximately Gaussian distributions.

    >>> from scipy import stats
    >>> def make_gaussian(mean, cov, nside=512):
    ...     npix = ah.nside_to_npix(nside)
    ...     xyz = np.transpose(hp.pix2vec(nside, np.arange(npix)))
    ...     dist = stats.multivariate_normal(mean, cov)
    ...     prob = dist.pdf(xyz)
    ...     return prob / prob.sum()
    ...

    This one is centered at RA=45°, Dec=0° and has a standard deviation of ~1°.

    >>> prob = make_gaussian(
    ...     [1/np.sqrt(2), 1/np.sqrt(2), 0],
    ...     np.square(np.deg2rad(1)))
    ...
    >>> find_ellipse(prob)  # doctest: +FLOAT_CMP
    (45.0, 0.0, 2.1424077148886744, 2.1420790721225518, 90.0, 14.467701995920123)

    This one is centered at RA=45°, Dec=0°, and is elongated in the north-south
    direction.

    >>> prob = make_gaussian(
    ...     [1/np.sqrt(2), 1/np.sqrt(2), 0],
    ...     np.diag(np.square(np.deg2rad([1, 1, 10]))))
    ...
    >>> find_ellipse(prob)  # doctest: +FLOAT_CMP
    (44.99999999999999, 0.0, 13.58768882719899, 2.0829846178241853, 90.0, 88.57796576937031)

    This one is centered at RA=0°, Dec=0°, and is elongated in the east-west
    direction.

    >>> prob = make_gaussian(
    ...     [1, 0, 0],
    ...     np.diag(np.square(np.deg2rad([1, 10, 1]))))
    ...
    >>> find_ellipse(prob)  # doctest: +FLOAT_CMP
    (0.0, 0.0, 13.583918022027149, 2.0823769912401433, 0.0, 88.54622940628761)

    This one is centered at RA=0°, Dec=0°, and has its long axis tilted about
    10° to the west of north.

    >>> prob = make_gaussian(
    ...     [1, 0, 0],
    ...     [[0.1, 0, 0],
    ...      [0, 0.1, -0.15],
    ...      [0, -0.15, 1]])
    ...
    >>> find_ellipse(prob)  # doctest: +FLOAT_CMP
    (0.0, 0.0, 64.7713312709293, 33.50754131182681, 80.78231196786838, 6372.344658663038)

    This one is centered at RA=0°, Dec=0°, and has its long axis tilted about
    10° to the east of north.

    >>> prob = make_gaussian(
    ...     [1, 0, 0],
    ...     [[0.1, 0, 0],
    ...      [0, 0.1, 0.15],
    ...      [0, 0.15, 1]])
    ...
    >>> find_ellipse(prob)  # doctest: +FLOAT_CMP
    (0.0, 0.0, 64.77133127093047, 33.50754131182745, 99.21768803213159, 6372.344658663096)

    This one is centered at RA=0°, Dec=0°, and has its long axis tilted about
    80° to the east of north.

    >>> prob = make_gaussian(
    ...     [1, 0, 0],
    ...     [[0.1, 0, 0],
    ...      [0, 1, 0.15],
    ...      [0, 0.15, 0.1]])
    ...
    >>> find_ellipse(prob)  # doctest: +FLOAT_CMP
    (0.0, 0.0, 64.7756448603915, 33.509863018519894, 170.78252287327365, 6372.425731592412)

    This one is centered at RA=0°, Dec=0°, and has its long axis tilted about
    80° to the west of north.

    >>> prob = make_gaussian(
    ...     [1, 0, 0],
    ...     [[0.1, 0, 0],
    ...      [0, 1, -0.15],
    ...      [0, -0.15, 0.1]])
    ...
    >>> find_ellipse(prob)  # doctest: +FLOAT_CMP
    (0.0, 0.0, 64.77564486039148, 33.50986301851987, 9.217477126726322, 6372.42573159241)

    """  # noqa: E501
    try:
        prob['UNIQ']
    except (IndexError, KeyError, ValueError):
        npix = len(prob)
        nside = ah.npix_to_nside(npix)
        ipix = range(npix)
        area = ah.nside_to_pixel_area(nside).to_value(u.deg**2)
    else:
        order, ipix = moc.uniq2nest(prob['UNIQ'])
        nside = 1 << order.astype(int)
        ipix = ipix.astype(int)
        area = ah.nside_to_pixel_area(nside).to_value(u.sr)
        prob = prob['PROBDENSITY'] * area
        area *= np.square(180 / np.pi)
        nest = True

    # Find median a posteriori sky position.
    xyz0 = [
        quantile(x, 0.5, weights=prob)
        for x in hp.pix2vec(nside, ipix, nest=nest)
    ]
    (ra, ), (dec, ) = hp.vec2ang(np.asarray(xyz0), lonlat=True)

    # Construct WCS with the specified projection
    # and centered on mean direction.
    w = WCS()
    w.wcs.crval = [ra, dec]
    w.wcs.ctype = ['RA---' + projection, 'DEC--' + projection]

    # Transform HEALPix to zenithal equidistant coordinates.
    xy = w.wcs_world2pix(
        np.transpose(hp.pix2ang(nside, ipix, nest=nest, lonlat=True)), 1)

    # Keep only values that were inside the projection.
    keep = np.logical_and.reduce(np.isfinite(xy), axis=1)
    xy = xy[keep]
    prob = prob[keep]
    if not np.isscalar(area):
        area = area[keep]

    # Find covariance matrix, performing three rounds of sigma-clipping
    # to reject outliers.
    keep = np.ones(len(xy), dtype=bool)
    for _ in range(3):
        c = np.cov(xy[keep], aweights=prob[keep], rowvar=False)
        nsigmas = np.sqrt(np.sum(xy.T * np.linalg.solve(c, xy.T), axis=0))
        keep &= (nsigmas < 3)

    # Find the number of sigma that enclose the cl% credible level.
    i = np.argsort(nsigmas)
    nsigmas = nsigmas[i]
    cls = np.cumsum(prob[i])
    if np.isscalar(area):
        careas = np.arange(1, len(i) + 1) * area
    else:
        careas = np.cumsum(area[i])
    nsigma = np.interp(1e-2 * cl, cls, nsigmas)
    area = np.interp(1e-2 * cl, cls, careas)

    # If the credible level is not within the projection,
    # then stop here and return all nans.
    if 1e-2 * cl > cls[-1]:
        return np.nan, np.nan, np.nan, np.nan, np.nan, np.nan

    # Find the eigendecomposition of the covariance matrix.
    w, v = np.linalg.eigh(c)

    # Find the semi-minor and semi-major axes.
    b, a = nsigma * np.sqrt(w)

    # Find the position angle.
    pa = np.rad2deg(np.arctan2(*v[0]))

    # An ellipse is symmetric under rotations of 180°.
    # Return the smallest possible positive position angle.
    pa %= 180

    # Done!
    return ra, dec, a, b, pa, area
Example #12
def contour(m, levels, nest=False, degrees=False, simplify=True):
    """Calculate contours from a HEALPix dataset.

    Parameters
    ----------
    m : `numpy.ndarray`
        The HEALPix dataset.
    levels : list
        The list of contour values.
    nest : bool, default=False
        Indicates whether the input sky map is in nested rather than
        ring-indexed HEALPix coordinates (default: ring).
    degrees : bool, default=False
        Whether the contours are in degrees instead of radians.
    simplify : bool, default=True
        Whether to simplify the paths.

    Returns
    -------
    list
        A list with the same length as `levels`.
        Each item is a list of disjoint polygons, of which each item is a
        list of points, of which each is a list consisting of the right
        ascension and declination.

    Examples
    --------
    A very simple example sky map:

    >>> nside = 32
    >>> npix = ah.nside_to_npix(nside)
    >>> ra, dec = hp.pix2ang(nside, np.arange(npix), lonlat=True)
    >>> m = dec
    >>> contour(m, [10, 20, 30], degrees=True)
    [[[[..., ...], ...], ...], ...]

    """
    # Infrequently used import
    import networkx as nx

    # Determine HEALPix resolution.
    npix = len(m)
    nside = ah.npix_to_nside(npix)
    min_area = 0.4 * ah.nside_to_pixel_area(nside).to_value(u.sr)

    neighbors = hp.get_all_neighbours(nside, np.arange(npix), nest=nest).T

    # Loop over the requested contours.
    paths = []
    for level in levels:

        # Find credible region.
        indicator = (m >= level)

        # Find all faces that lie on the boundary.
        # This speeds up the doubly nested ``for`` loop below by allowing us to
        # skip the vast majority of faces that are on the interior or the
        # exterior of the contour.
        tovisit = np.flatnonzero(
            np.any(indicator.reshape(-1, 1) != indicator[neighbors[:, ::2]],
                   axis=1))

        # Construct a graph of the edges of the contour.
        graph = nx.Graph()
        face_pairs = set()
        for ipix1 in tovisit:
            neighborhood = neighbors[ipix1]
            for _ in range(4):
                neighborhood = np.roll(neighborhood, 2)
                ipix2 = neighborhood[4]

                # Skip this pair of faces if we have already examined it.
                new_face_pair = frozenset((ipix1, ipix2))
                if new_face_pair in face_pairs:
                    continue
                face_pairs.add(new_face_pair)

                # Determine if this pair of faces are on a boundary of the
                # credible level.
                if indicator[ipix1] == indicator[ipix2]:
                    continue

                # Add the common edge of this pair of faces.
                # Label each vertex with the set of faces that they share.
                graph.add_edge(frozenset((ipix1, *neighborhood[2:5])),
                               frozenset((ipix1, *neighborhood[4:7])))
        graph = nx.freeze(graph)

        # Find contours by detecting cycles in the graph.
        cycles = nx.cycle_basis(graph)

        # Construct the coordinates of the vertices by averaging the
        # coordinates of the connected faces.
        cycles = [[
            np.sum(hp.pix2vec(nside, [i for i in v if i != -1], nest=nest), 1)
            for v in cycle
        ] for cycle in cycles]

        # Simplify paths if requested.
        if simplify:
            cycles = [_simplify(cycle, min_area) for cycle in cycles]
            cycles = [cycle for cycle in cycles if len(cycle) > 2]

        # Convert to angles.
        cycles = [
            _vec2radec(cycle, degrees=degrees).tolist() for cycle in cycles
        ]

        # Add to output paths.
        paths.append([cycle + [cycle[0]] for cycle in cycles])

    return paths
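
# Hedged usage sketch: contour() is typically fed per-pixel credible levels,
# as computed in Example #9; the import path and file name below are
# assumptions, not taken from the snippet above.
import healpy as hp
from ligo.skymap.postprocess import find_greedy_credible_levels

prob = hp.read_map('skymap.fits.gz')                # flat probability map (ring)
credible_levels = 100 * find_greedy_credible_levels(prob)
# Boundaries of the 50% and 90% credible regions, as lists of (ra, dec) pairs
# in degrees.
paths = contour(credible_levels, [50, 90], degrees=True)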