Example No. 1
def _reconstruct_nested_breadthfirst(m, extra):
    """Traverse a NESTED HEALPix map breadth-first, from coarse to fine,
    yielding the largest tiles over which the map value is constant."""
    m = np.asarray(m)
    max_npix = len(m)
    max_nside = ah.npix_to_nside(max_npix)
    max_order = ah.nside_to_level(max_nside)
    seen = np.zeros(max_npix, dtype=bool)

    for order in range(max_order + 1):
        nside = ah.level_to_nside(order)
        npix = ah.nside_to_npix(nside)
        skip = max_npix // npix
        if skip > 1:
            b = m.reshape(-1, skip)
            a = b[:, 0].reshape(-1, 1)
            b = b[:, 1:]
            aseen = seen.reshape(-1, skip)
            eq = ((a == b) | ((a != a) & (b != b))).all(1) & (~aseen).all(1)
        else:
            eq = ~seen
        for ipix in np.flatnonzero(eq):
            ipix0 = ipix * skip
            ipix1 = (ipix + 1) * skip
            seen[ipix0:ipix1] = True
            if extra:
                yield _HEALPixTreeVisitExtra(nside, max_nside, ipix, ipix0,
                                             ipix1, m[ipix0])
            else:
                yield _HEALPixTreeVisit(nside, ipix)
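
# A minimal usage sketch: drive the generator above on a toy NESTED map.
# The namedtuples are assumptions standing in for the module-private visit
# classes that the generator yields.
from collections import namedtuple

import astropy_healpix as ah
import numpy as np

_HEALPixTreeVisit = namedtuple('_HEALPixTreeVisit', 'nside ipix')
_HEALPixTreeVisitExtra = namedtuple(
    '_HEALPixTreeVisitExtra', 'nside max_nside ipix ipix0 ipix1 value')

# nside=2 map whose first 6 coarse (nside=1) tiles are constant.
m = np.concatenate([np.zeros(24), np.arange(24.0)])
for visit in _reconstruct_nested_breadthfirst(m, extra=False):
    print(visit.nside, visit.ipix)  # 6 visits at nside=1, then 24 at nside=2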
Example No. 2
def _interpolate_level(m):
    """Recursive multi-resolution interpolation. Modifies `m` in place."""
    # Determine resolution.
    npix = len(m)

    if npix > 12:
        # Determine which pixels comprise multi-pixel tiles.
        ipix = np.flatnonzero((m[0::4] == m[1::4]) & (m[0::4] == m[2::4])
                              & (m[0::4] == m[3::4]))

        if len(ipix):
            ipix = 4 * ipix + np.expand_dims(np.arange(4, dtype=np.intp), 1)
            ipix = ipix.T.ravel()

            nside = ah.npix_to_nside(npix)

            # Downsample.
            m_lores = hp.ud_grade(m,
                                  nside // 2,
                                  order_in='NESTED',
                                  order_out='NESTED')

            # Interpolate recursively.
            _interpolate_level(m_lores)

            # Record interpolated multi-pixel tiles.
            m[ipix] = hp.get_interp_val(m_lores,
                                        *hp.pix2ang(nside, ipix, nest=True),
                                        nest=True)
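
# A hedged usage sketch: build a NESTED map that is constant over coarse
# tiles and let _interpolate_level smooth it in place. Assumes numpy, healpy,
# and astropy_healpix, which the function above also relies on.
import astropy_healpix as ah
import healpy as hp
import numpy as np

nside = 8
m_coarse = np.random.rand(ah.nside_to_npix(1))  # 12 coarse tile values
m = hp.ud_grade(m_coarse, nside, order_in='NESTED', order_out='NESTED')
_interpolate_level(m)  # m now varies smoothly within each coarse tile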
Example No. 3
def test_combine(tmpdir):
    """Test ligo-skymap-combine."""
    fn1 = str(tmpdir / 'skymap1.fits.gz')
    fn2 = str(tmpdir / 'skymap2.fits.gz')
    fn3 = str(tmpdir / 'joint_skymap.fits.gz')

    # generate a hemisphere of constant probability
    nside1 = 32
    npix1 = ah.nside_to_npix(nside1)
    m1 = np.zeros(npix1)
    disc_idx = hp.query_disc(nside1, (1, 0, 0), np.pi / 2)
    m1[disc_idx] = 1
    m1 /= m1.sum()
    hp.write_map(fn1,
                 m1,
                 column_names=['PROBABILITY'],
                 extra_header=[('INSTRUME', 'X1')])

    # generate another hemisphere of constant probability
    # but with higher resolution and rotated 90 degrees
    nside2 = 64
    npix2 = ah.nside_to_npix(nside2)
    m2 = np.zeros(npix2)
    disc_idx = hp.query_disc(nside2, (0, 1, 0), np.pi / 2)
    m2[disc_idx] = 1
    m2 /= m2.sum()
    hp.write_map(fn2,
                 m2,
                 column_names=['PROBABILITY'],
                 extra_header=[('INSTRUME', 'Y1')])

    run_entry_point('ligo-skymap-combine', fn1, fn2, fn3)

    m3 = hp.read_map(fn3, nest=True)
    npix3 = len(m3)
    nside3 = ah.npix_to_nside(npix3)
    pix_area3 = ah.nside_to_pixel_area(nside3).to_value(u.sr)

    # resolution must match the highest original resolution
    assert npix3 == npix2
    # probability must be normalized to 1
    assert m3.sum() == pytest.approx(1)
    # support must be ¼ of the sphere
    tolerance = 10 * ah.nside_to_pixel_area(nside1).to_value(u.sr)
    assert sum(m3 > 0) * pix_area3 == pytest.approx(np.pi, abs=tolerance)

    # generate a BAYESTAR-like map with mock distance information
    d_mu = np.zeros_like(m1)
    d_sigma = np.ones_like(m1)
    d_norm = np.ones_like(m1)
    io.write_sky_map(fn1, [m1, d_mu, d_sigma, d_norm])

    run_entry_point('ligo-skymap-combine', fn1, fn2, fn3)

    m3, meta3 = io.read_sky_map(fn3, nest=True, distances=True)

    # check that marginal distance moments match what was simulated
    mean, std, _ = distance.parameters_to_moments(d_mu[0], d_sigma[0])
    assert meta3['distmean'] == pytest.approx(mean)
    assert meta3['diststd'] == pytest.approx(std)
Example No. 4
def main(args=None):
    opts = parser().parse_args(args)

    # Late imports

    from ..io import fits
    import astropy_healpix as ah
    from astropy.coordinates import SkyCoord
    from astropy.table import Table
    from astropy import units as u
    import healpy as hp
    import numpy as np

    prob, meta = fits.read_sky_map(opts.input.name, nest=None)
    npix = len(prob)
    nside = ah.npix_to_nside(npix)
    ipix = np.arange(npix)
    ra, dec = hp.pix2ang(nside, ipix, lonlat=True, nest=meta['nest'])
    coord = SkyCoord(ra * u.deg, dec * u.deg)
    table = Table({
        'prob': prob,
        'constellation': coord.get_constellation()
    },
                  copy=False)
    table = table.group_by('constellation').groups.aggregate(np.sum)
    table.sort('prob')
    table.reverse()
    table.write(opts.output, format='ascii.tab')
Example No. 5
def posterior_mean(prob, nest=False):
    """Return the posterior mean sky position as a SkyCoord."""
    npix = len(prob)
    nside = ah.npix_to_nside(npix)
    xyz = hp.pix2vec(nside, np.arange(npix), nest=nest)
    mean_xyz = np.average(xyz, axis=1, weights=prob)
    pos = SkyCoord(*mean_xyz, representation_type=CartesianRepresentation)
    pos.representation_type = UnitSphericalRepresentation
    return pos
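
# A minimal sketch: the posterior mean of a small disc of probability lands
# near the disc center. The astropy.coordinates imports below are the ones
# posterior_mean itself relies on.
import astropy_healpix as ah
import healpy as hp
import numpy as np
from astropy.coordinates import (CartesianRepresentation, SkyCoord,
                                 UnitSphericalRepresentation)

nside = 64
prob = np.zeros(ah.nside_to_npix(nside))
prob[hp.query_disc(nside, (0, 1, 0), np.radians(5))] = 1  # disc at RA=90, Dec=0
prob /= prob.sum()
center = posterior_mean(prob)
print(center.ra.deg, center.dec.deg)  # approximately (90, 0)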
Example No. 6
def principal_axes(prob, distmu, distsigma, nest=False):
    npix = len(prob)
    nside = ah.npix_to_nside(npix)
    good = np.isfinite(prob) & np.isfinite(distmu) & np.isfinite(distsigma)
    ipix = np.flatnonzero(good)
    distmean, diststd, _ = parameters_to_moments(distmu[good], distsigma[good])
    mass = prob[good] * (np.square(diststd) + np.square(distmean))
    xyz = np.asarray(hp.pix2vec(nside, ipix, nest=nest))
    cov = np.dot(xyz * mass, xyz.T)
    L, V = np.linalg.eigh(cov)
    if np.linalg.det(V) < 0:
        V = -V
    return V
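
# A hedged sketch: for an isotropic toy posterior with constant distance
# parameters, principal_axes returns an orthonormal frame. The import of
# parameters_to_moments assumes the ligo.skymap.distance module used above.
import astropy_healpix as ah
import numpy as np
from ligo.skymap.distance import parameters_to_moments

npix = ah.nside_to_npix(16)
prob = np.full(npix, 1 / npix)
distmu = np.full(npix, 100.0)    # illustrative distance parameters (Mpc)
distsigma = np.full(npix, 10.0)
V = principal_axes(prob, distmu, distsigma)
print(np.allclose(V @ V.T, np.eye(3)))  # True: columns are orthonormal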
Example No. 7
def count_modes(m, nest=False):
    """Count the number of modes in a binary HEALPix image by repeatedly
    applying the flood-fill algorithm.

    WARNING: The input array is clobbered in the process.
    """
    npix = len(m)
    nside = ah.npix_to_nside(npix)
    for nmodes in range(npix):
        nonzeroipix = np.flatnonzero(m)
        if len(nonzeroipix):
            flood_fill(nside, nonzeroipix[0], m, nest=nest)
        else:
            break
    return nmodes
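
# A hedged sketch: two disjoint discs give two modes. count_modes clobbers
# its input, so pass a copy if the map is needed afterwards; flood_fill is
# assumed to come from the same module as count_modes.
import astropy_healpix as ah
import healpy as hp
import numpy as np

nside = 32
m = np.zeros(ah.nside_to_npix(nside))
m[hp.query_disc(nside, (1, 0, 0), np.radians(10))] = 1
m[hp.query_disc(nside, (-1, 0, 0), np.radians(10))] = 1
print(count_modes(m.copy()))  # 2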
Example No. 8
def main(args=None):
    opts = parser().parse_args(args)

    # Late imports

    import numpy as np
    import matplotlib.pyplot as plt
    from matplotlib import rcParams
    from ..io import fits
    from .. import plot
    from .. import postprocess
    import astropy_healpix as ah
    from astropy.coordinates import SkyCoord
    from astropy.time import Time
    from astropy import units as u

    skymap, metadata = fits.read_sky_map(opts.input.name, nest=None)
    nside = ah.npix_to_nside(len(skymap))

    # Convert sky map from probability to probability per square degree.
    deg2perpix = ah.nside_to_pixel_area(nside).to_value(u.deg**2)
    probperdeg2 = skymap / deg2perpix

    axes_args = {}
    if opts.geo:
        axes_args['projection'] = 'geo'
        obstime = Time(metadata['gps_time'], format='gps').utc.isot
        axes_args['obstime'] = obstime
    else:
        axes_args['projection'] = 'astro'
    axes_args['projection'] += ' ' + opts.projection
    if opts.projection_center is not None:
        axes_args['center'] = SkyCoord(opts.projection_center)
    if opts.zoom_radius is not None:
        axes_args['radius'] = opts.zoom_radius
    ax = plt.axes(**axes_args)
    ax.grid()

    # Plot sky map.
    vmax = probperdeg2.max()
    img = ax.imshow_hpx((probperdeg2, 'ICRS'),
                        nested=metadata['nest'],
                        vmin=0.,
                        vmax=vmax)

    # Add colorbar.
    if opts.colorbar:
        cb = plot.colorbar(img)
        cb.set_label(r'prob. per deg$^2$')

    # Add contours.
    if opts.contour:
        cls = 100 * postprocess.find_greedy_credible_levels(skymap)
        cs = ax.contour_hpx((cls, 'ICRS'),
                            nested=metadata['nest'],
                            colors='k',
                            linewidths=0.5,
                            levels=opts.contour)
        fmt = r'%g\%%' if rcParams['text.usetex'] else '%g%%'
        plt.clabel(cs, fmt=fmt, fontsize=6, inline=True)

    # Add continents.
    if opts.geo:
        plt.plot(*plot.coastlines(),
                 color='0.5',
                 linewidth=0.5,
                 transform=ax.get_transform('world'))

    radecs = opts.radec
    if opts.inj_database:
        query = '''SELECT DISTINCT longitude, latitude FROM sim_inspiral AS si
                   INNER JOIN coinc_event_map AS cm1
                   ON (si.simulation_id = cm1.event_id)
                   INNER JOIN coinc_event_map AS cm2
                   ON (cm1.coinc_event_id = cm2.coinc_event_id)
                   WHERE cm2.event_id = ?
                   AND cm1.table_name = 'sim_inspiral'
                   AND cm2.table_name = 'coinc_event'
                   '''
        (ra,
         dec), = opts.inj_database.execute(query,
                                           (metadata['objid'], )).fetchall()
        radecs.append(np.rad2deg([ra, dec]).tolist())

    # Add markers (e.g., for injections or external triggers).
    for ra, dec in radecs:
        ax.plot_coord(SkyCoord(ra, dec, unit='deg'),
                      '*',
                      markerfacecolor='white',
                      markeredgecolor='black',
                      markersize=10)

    # Add a white outline to all text to make it stand out from the background.
    plot.outline_text(ax)

    if opts.annotate:
        text = []
        try:
            objid = metadata['objid']
        except KeyError:
            pass
        else:
            text.append('event ID: {}'.format(objid))
        if opts.contour:
            pp = np.round(opts.contour).astype(int)
            ii = np.round(
                np.searchsorted(np.sort(cls), opts.contour) *
                deg2perpix).astype(int)
            for i, p in zip(ii, pp):
                # FIXME: use Unicode symbol instead of TeX '$^2$'
                # because of broken fonts on Scientific Linux 7.
                text.append('{:d}% area: {:,d} deg²'.format(p, i))
        ax.text(1, 1, '\n'.join(text), transform=ax.transAxes, ha='right')

    # Show or save output.
    opts.output()
Example No. 9
def main(args=None):
    args = parser().parse_args(args)

    import numpy as np
    import astropy_healpix as ah
    from astropy.io import fits
    from astropy.time import Time
    import healpy as hp

    from ..distance import parameters_to_marginal_moments
    from ..io import read_sky_map, write_sky_map

    input_skymaps = []
    dist_mu = dist_sigma = dist_norm = None
    for input_file in args.input:
        with fits.open(input_file) as hdus:
            header = hdus[0].header.copy()
            header.extend(hdus[1].header)
            has_distance = 'DISTMU' in hdus[1].columns.names
            data, meta = read_sky_map(hdus, nest=True, distances=has_distance)

        if has_distance:
            if dist_mu is not None:
                raise RuntimeError('only one input localization can have'
                                   ' distance information')
            dist_mu = data[1]
            dist_sigma = data[2]
            dist_norm = data[3]
        else:
            data = (data, )

        nside = ah.npix_to_nside(len(data[0]))
        input_skymaps.append((nside, data[0], meta, header))

    max_nside = max(x[0] for x in input_skymaps)

    # upsample sky posteriors to maximum resolution and combine them
    combined_prob = None
    for nside, prob, _, _ in input_skymaps:
        if nside < max_nside:
            prob = hp.ud_grade(prob,
                               max_nside,
                               order_in='NESTED',
                               order_out='NESTED')
        if combined_prob is None:
            combined_prob = np.ones_like(prob)
        combined_prob *= prob

    # normalize joint posterior
    norm = combined_prob.sum()
    if norm == 0:
        raise RuntimeError('input sky localizations are disjoint')
    combined_prob /= norm

    out_kwargs = {'gps_creation_time': Time.now().gps, 'nest': True}
    if args.origin is not None:
        out_kwargs['origin'] = args.origin

    # average the various input event times
    input_gps = [x[2]['gps_time'] for x in input_skymaps if 'gps_time' in x[2]]
    if input_gps:
        out_kwargs['gps_time'] = np.mean(input_gps)

    # combine instrument tags
    out_instruments = set()
    for x in input_skymaps:
        if 'instruments' in x[2]:
            out_instruments.update(x[2]['instruments'])
    out_kwargs['instruments'] = ','.join(out_instruments)

    # update marginal distance posterior, if available
    if dist_mu is not None:
        if ah.npix_to_nside(len(dist_mu)) < max_nside:
            dist_mu = hp.ud_grade(dist_mu,
                                  max_nside,
                                  order_in='NESTED',
                                  order_out='NESTED')
            dist_sigma = hp.ud_grade(dist_sigma,
                                     max_nside,
                                     order_in='NESTED',
                                     order_out='NESTED')
            dist_norm = hp.ud_grade(dist_norm,
                                    max_nside,
                                    order_in='NESTED',
                                    order_out='NESTED')
        distmean, diststd = parameters_to_marginal_moments(
            combined_prob, dist_mu, dist_sigma)
        out_data = (combined_prob, dist_mu, dist_sigma, dist_norm)
        out_kwargs['distmean'] = distmean
        out_kwargs['diststd'] = diststd
    else:
        out_data = combined_prob

    # save input headers in output history
    out_kwargs['HISTORY'] = []
    for i, x in enumerate(input_skymaps):
        out_kwargs['HISTORY'].append('')
        out_kwargs['HISTORY'].append(
            'Headers of HDUs 0 and 1 of input file {:d}:'.format(i))
        out_kwargs['HISTORY'].append('')
        out_kwargs['HISTORY'] += [
            '{} = {}'.format(k, v) for k, v in x[3].items()
        ]

    write_sky_map(args.output, out_data, **out_kwargs)
Example No. 10
def healpix_to_image(healpix_data,
                     coord_system_in,
                     wcs_out,
                     shape_out,
                     order='bilinear',
                     nested=False):
    """
    Convert image in HEALPIX format to a normal FITS projection image (e.g.
    CAR or AIT).

    Parameters
    ----------
    healpix_data : `numpy.ndarray`
        HEALPIX data array
    coord_system_in : str or `~astropy.coordinates.BaseCoordinateFrame`
        The coordinate system for the input HEALPIX data, as an Astropy
        coordinate frame or corresponding string alias (e.g. ``'icrs'`` or
        ``'galactic'``)
    wcs_out : `~astropy.wcs.WCS`
        The WCS of the output array
    shape_out : tuple
        The shape of the output array
    order : int or str, optional
        The order of the interpolation (if ``mode`` is set to
        ``'interpolation'``). This can be either one of the following strings:

            * 'nearest-neighbor'
            * 'bilinear'

        or an integer. A value of ``0`` indicates nearest neighbor
        interpolation.
    nested : bool
        The order of the healpix_data, either nested or ring.  Stored in
        FITS headers in the ORDERING keyword.

    Returns
    -------
    reprojected_data : `numpy.ndarray`
        HEALPIX image resampled onto the reference image
    footprint : `~numpy.ndarray`
        Footprint of the input array in the output array. Values of 0 indicate
        no coverage or valid values in the input image, while values of 1
        indicate valid values.
    """

    healpix_data = np.asarray(healpix_data, dtype=float)

    # Look up lon, lat of pixels in reference system
    yinds, xinds = np.indices(shape_out)
    lon_out, lat_out = wcs_out.wcs_pix2world(xinds, yinds, 0)

    # Convert between celestial coordinates
    coord_system_in = parse_coord_system(coord_system_in)
    with np.errstate(invalid='ignore'):
        lon_in, lat_in = convert_world_coordinates(
            lon_out, lat_out, wcs_out, (coord_system_in, u.deg, u.deg))

    lon_in = u.Quantity(lon_in, unit=u.deg, copy=False)
    lat_in = u.Quantity(lat_in, unit=u.deg, copy=False)

    if isinstance(order, six.string_types):
        order = ORDER[order]

    nside = npix_to_nside(len(healpix_data))

    hp = HEALPix(nside=nside, order='nested' if nested else 'ring')

    if order == 1:
        data = hp.interpolate_bilinear_lonlat(lon_in, lat_in, healpix_data)
    elif order == 0:
        ipix = hp.lonlat_to_healpix(lon_in, lat_in)
        data = healpix_data[ipix]
    else:
        raise ValueError(
            "Only nearest-neighbor and bilinear interpolation are supported")

    footprint = np.ones(data.shape, bool)

    return data, footprint
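
# A minimal usage sketch, assuming the module-level helpers referenced above
# (parse_coord_system, convert_world_coordinates, ORDER, HEALPix,
# npix_to_nside) are importable as in reproject: resample a toy HEALPix map
# onto an all-sky plate carree grid.
import numpy as np
from astropy.wcs import WCS

nside = 16
healpix_data = np.arange(12 * nside ** 2, dtype=float)

wcs_out = WCS(naxis=2)
wcs_out.wcs.ctype = ['RA---CAR', 'DEC--CAR']
wcs_out.wcs.crval = [180.0, 0.0]
wcs_out.wcs.crpix = [90.5, 45.5]
wcs_out.wcs.cdelt = [-2.0, 2.0]

img, footprint = healpix_to_image(
    healpix_data, 'icrs', wcs_out, (90, 180), order='bilinear', nested=False)
print(img.shape)  # (90, 180)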
Example No. 11
def gsm_sky_model(freqs, resolution="hi", nside=None):
    """
    Return a pyradiosky SkyModel object populated with a Global Sky Model datacube in 
    healpix format.

    Parameters
    ----------
    freqs : array_like
        Frequency array, in Hz.

    resolution : str, optional
        Whether to use the high or low resolution pygdsm maps. Options are 'hi' or 'low'.

    nside : int, optional
        Healpix nside to up- or down-sample the GSM sky model to. Default: `None` (use the 
        default from `pygdsm`, which is 1024).

    Returns
    -------
    sky_model : pyradiosky.SkyModel
        SkyModel object.
    """
    import pygdsm
    
    # Initialise GSM object
    gsm = pygdsm.GlobalSkyModel2016(data_unit="TRJ", resolution=resolution, freq_unit="Hz")

    # Construct GSM datacube
    hpmap = gsm.generate(freqs=freqs) # FIXME: nside=1024, ring ordering, galactic coords
    hpmap_units = "K"

    # Set nside or resample
    nside_gsm = int(astropy_healpix.npix_to_nside(hpmap.shape[-1]))
    if nside is None:
        # Use default nside from pygdsm map
        nside = nside_gsm
    else:
        # Transform to a user-selected nside
        hpmap_new = np.zeros((hpmap.shape[0], astropy_healpix.nside_to_npix(nside)), 
                             dtype=hpmap.dtype)
        for i in range(hpmap.shape[0]):
            hpmap_new[i,:] = hp.ud_grade(hpmap[i,:], 
                                         nside_out=nside, 
                                         order_in="RING", 
                                         order_out="RING")
        hpmap = hpmap_new

    # Get datacube properties
    npix = astropy_healpix.nside_to_npix(nside)
    indices = np.arange(npix)
    history = ("pygdsm.GlobalSkyModel2016, data_unit=TRJ, "
               f"resolution={resolution}, freq_unit=Hz")
    freq = units.Quantity(freqs, "hertz")

    # hmap is in K
    stokes = units.Quantity(np.zeros((4, len(freq), len(indices))), hpmap_units)
    stokes[0] = hpmap * units.Unit(hpmap_units)

    # Construct pyradiosky SkyModel
    sky_model = pyradiosky.SkyModel(
                                    nside=nside,
                                    hpx_inds=indices,
                                    stokes=stokes,
                                    spectral_type="full",
                                    freq_array=freq,
                                    history=history,
                                    frame="galactic",
                                    hpx_order="ring"
                                )

    sky_model.healpix_interp_transform(frame='icrs', full_sky=True, inplace=True) # do coord transform
    assert sky_model.component_type == "healpix"
    return sky_model
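
# A hedged usage sketch (requires pygdsm, pyradiosky, healpy, and
# astropy-healpix to be installed): build a two-channel GSM cube downsampled
# to nside=64.
import numpy as np

freqs = np.array([100e6, 150e6])  # Hz
sky = gsm_sky_model(freqs, resolution="low", nside=64)
print(sky.Nfreqs, sky.Ncomponents)  # 2 channels over 12 * 64**2 pixels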
Example No. 12
def read_sky_map(filename, nest=False, distances=False, moc=False, **kwargs):
    """Read a LIGO/Virgo-type sky map and return a tuple of the HEALPix array
    and a dictionary of metadata from the header.

    Parameters
    ----------
    filename: string
        Path to the optionally gzip-compressed FITS file.

    nest: bool, optional
        If omitted or False, then detect the pixel ordering in the FITS file
        and rearrange if necessary to RING indexing before returning.

        If True, then detect the pixel ordering and rearrange if necessary to
        NESTED indexing before returning.

        If None, then preserve the ordering from the FITS file.

        Regardless of the value of this option, the ordering used in the FITS
        file is indicated as the value of the 'nest' key in the metadata
        dictionary.

    distances: bool, optional
        If true, then also read the additional HEALPix layers representing
        the conditional mean and standard deviation of distance as a function
        of sky location.

    moc: bool, optional
        If true, then preserve multi-order structure if present.

    Examples
    --------
    Test that we can read a legacy IDL-compatible file
    (https://bugs.ligo.org/redmine/issues/5168):

    >>> import tempfile
    >>> with tempfile.NamedTemporaryFile(suffix='.fits') as f:
    ...     nside = 512
    ...     npix = ah.nside_to_npix(nside)
    ...     ipix_nest = np.arange(npix)
    ...     hp.write_map(f.name, ipix_nest, nest=True, column_names=['PROB'])
    ...     m, meta = read_sky_map(f.name)
    ...     np.testing.assert_array_equal(m, hp.ring2nest(nside, ipix_nest))

    """
    m = Table.read(filename, format='fits', **kwargs)

    # Remove some keys that we do not need
    for key in ('PIXTYPE', 'EXTNAME', 'NSIDE', 'FIRSTPIX', 'LASTPIX',
                'INDXSCHM', 'MOCORDER'):
        m.meta.pop(key, None)

    if m.meta.pop('COORDSYS', 'C') != 'C':
        raise ValueError('ligo.skymap only reads and writes sky maps in '
                         'equatorial coordinates.')

    try:
        value = m.meta.pop('ORDERING')
    except KeyError:
        pass
    else:
        if value == 'RING':
            m.meta['nest'] = False
        elif value == 'NESTED':
            m.meta['nest'] = True
        elif value == 'NUNIQ':
            pass
        else:
            raise ValueError(
                'ORDERING card in header has unknown value: {0}'.format(value))

    for fits_key, rows in itertools.groupby(FITS_META_MAPPING,
                                            lambda row: row[1]):
        try:
            value = m.meta.pop(fits_key)
        except KeyError:
            pass
        else:
            for row in rows:
                key, _, _, _, from_fits = row
                if from_fits is not None:
                    m.meta[key] = from_fits(value)

    # FIXME: Fermi GBM HEALPix maps use the column name 'PROBABILITY',
    # instead of the LIGO/Virgo convention of 'PROB'.
    #
    # Fermi may change to our convention in the future, but for now we
    # rename the column.
    if 'PROBABILITY' in m.colnames:
        m.rename_column('PROBABILITY', 'PROB')

    # For a long time, we produced files with a UNIQ column that was an
    # unsigned integer. Cast it here to a signed integer so that the user
    # can handle old or new sky maps the same way.
    if 'UNIQ' in m.colnames:
        m['UNIQ'] = m['UNIQ'].astype(np.int64)

    if 'UNIQ' not in m.colnames:
        m = Table([col.ravel() for col in m.columns.values()], meta=m.meta)

    if 'UNIQ' in m.colnames and not moc:
        from ..bayestar import rasterize
        m = rasterize(m)
        m.meta['nest'] = True
    elif 'UNIQ' not in m.colnames and moc:
        from ..bayestar import derasterize
        if not m.meta['nest']:
            npix = len(m)
            nside = ah.npix_to_nside(npix)
            m = m[hp.nest2ring(nside, np.arange(npix))]
        m = derasterize(m)
        m.meta.pop('nest', None)

    if 'UNIQ' not in m.colnames:
        npix = len(m)
        nside = ah.npix_to_nside(npix)

        if nest is None:
            pass
        elif m.meta['nest'] and not nest:
            m = m[hp.ring2nest(nside, np.arange(npix))]
        elif not m.meta['nest'] and nest:
            m = m[hp.nest2ring(nside, np.arange(npix))]

    if moc:
        return m
    elif distances:
        return tuple(np.asarray(m[name])
                     for name in DEFAULT_NESTED_NAMES), m.meta
    else:
        return np.asarray(m[DEFAULT_NESTED_NAMES[0]]), m.meta
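
# A minimal sketch of the distances=True return convention: write a toy
# four-layer map with write_sky_map (Example No. 13 below) and read it back.
# Assumes the module-level constants and imports both functions rely on.
import tempfile

import astropy_healpix as ah
import numpy as np

npix = ah.nside_to_npix(64)
prob = np.full(npix, 1 / npix)
layers = [prob, np.full(npix, 100.0), np.full(npix, 10.0), np.ones(npix)]
with tempfile.NamedTemporaryFile(suffix='.fits') as f:
    write_sky_map(f.name, layers, nest=True)
    (prob, distmu, distsigma, distnorm), meta = read_sky_map(
        f.name, nest=True, distances=True)
print(prob.sum(), meta['nest'])  # ~1.0 True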
Example No. 13
def write_sky_map(filename, m, **kwargs):
    """Write a gravitational-wave sky map to a file, populating the header
    with optional metadata.

    Parameters
    ----------
    filename: str
        Path to the optionally gzip-compressed FITS file.

    m : `astropy.table.Table`, `numpy.array`
        If a Numpy record array or astropy.table.Table instance, and has a
        column named 'UNIQ', then interpret the input as NUNIQ-style
        multi-order map [1]_. Otherwise, interpret as a NESTED or RING ordered
        map.

    **kwargs
        Additional metadata to add to FITS header. If m is an
        `astropy.table.Table` instance, then the header is initialized from
        both `m.meta` and `kwargs`.

    References
    ----------
    .. [1] Górski, K.M., Wandelt, B.D., Hivon, E., Hansen, F.K., & Banday, A.J.
        2017. The HEALPix Primer. The Unique Identifier scheme.
        http://healpix.sourceforge.net/html/intronode4.htm#SECTION00042000000000000000

    Examples
    --------
    Test header contents:

    >>> order = 9
    >>> nside = 2 ** order
    >>> npix = ah.nside_to_npix(nside)
    >>> prob = np.ones(npix, dtype=float) / npix

    >>> import tempfile
    >>> from ligo.skymap import version
    >>> with tempfile.NamedTemporaryFile(suffix='.fits') as f:
    ...     write_sky_map(f.name, prob, nest=True,
    ...                   vcs_version='foo 1.0', vcs_revision='bar',
    ...                   build_date='2018-01-01T00:00:00')
    ...     for card in fits.getheader(f.name, 1).cards:
    ...         print(str(card).rstrip())
    XTENSION= 'BINTABLE'           / binary table extension
    BITPIX  =                    8 / array data type
    NAXIS   =                    2 / number of array dimensions
    NAXIS1  =                    8 / length of dimension 1
    NAXIS2  =              3145728 / length of dimension 2
    PCOUNT  =                    0 / number of group parameters
    GCOUNT  =                    1 / number of groups
    TFIELDS =                    1 / number of table fields
    TTYPE1  = 'PROB    '
    TFORM1  = 'D       '
    TUNIT1  = 'pix-1   '
    PIXTYPE = 'HEALPIX '           / HEALPIX pixelisation
    ORDERING= 'NESTED  '           / Pixel ordering scheme: RING, NESTED, or NUNIQ
    COORDSYS= 'C       '           / Ecliptic, Galactic or Celestial (equatorial)
    NSIDE   =                  512 / Resolution parameter of HEALPIX
    INDXSCHM= 'IMPLICIT'           / Indexing: IMPLICIT or EXPLICIT
    VCSVERS = 'foo 1.0 '           / Software version
    VCSREV  = 'bar     '           / Software revision (Git)
    DATE-BLD= '2018-01-01T00:00:00' / Software build date

    >>> uniq = moc.nest2uniq(np.uint8(order), np.arange(npix))
    >>> probdensity = prob / hp.nside2pixarea(nside)
    >>> moc_data = np.rec.fromarrays(
    ...     [uniq, probdensity], names=['UNIQ', 'PROBDENSITY'])
    >>> with tempfile.NamedTemporaryFile(suffix='.fits') as f:
    ...     write_sky_map(f.name, moc_data,
    ...                   vcs_version='foo 1.0', vcs_revision='bar',
    ...                   build_date='2018-01-01T00:00:00')
    ...     for card in fits.getheader(f.name, 1).cards:
    ...         print(str(card).rstrip())
    XTENSION= 'BINTABLE'           / binary table extension
    BITPIX  =                    8 / array data type
    NAXIS   =                    2 / number of array dimensions
    NAXIS1  =                   16 / length of dimension 1
    NAXIS2  =              3145728 / length of dimension 2
    PCOUNT  =                    0 / number of group parameters
    GCOUNT  =                    1 / number of groups
    TFIELDS =                    2 / number of table fields
    TTYPE1  = 'UNIQ    '
    TFORM1  = 'K       '
    TTYPE2  = 'PROBDENSITY'
    TFORM2  = 'D       '
    TUNIT2  = 'sr-1    '
    PIXTYPE = 'HEALPIX '           / HEALPIX pixelisation
    ORDERING= 'NUNIQ   '           / Pixel ordering scheme: RING, NESTED, or NUNIQ
    COORDSYS= 'C       '           / Ecliptic, Galactic or Celestial (equatorial)
    MOCORDER=                    9 / MOC resolution (best order)
    INDXSCHM= 'EXPLICIT'           / Indexing: IMPLICIT or EXPLICIT
    VCSVERS = 'foo 1.0 '           / Software version
    VCSREV  = 'bar     '           / Software revision (Git)
    DATE-BLD= '2018-01-01T00:00:00' / Software build date

    """  # noqa: E501
    log.debug('normalizing metadata')
    if isinstance(m, Table) or (isinstance(m, np.ndarray) and m.dtype.names):
        m = Table(m, copy=False)
    else:
        if np.ndim(m) == 1:
            m = [m]
        m = Table(m, names=DEFAULT_NESTED_NAMES[:len(m)], copy=False)
    m.meta.update(kwargs)

    if 'UNIQ' in m.colnames:
        default_names = DEFAULT_NUNIQ_NAMES
        default_units = DEFAULT_NUNIQ_UNITS
        extra_header = [
            ('PIXTYPE', 'HEALPIX', 'HEALPIX pixelisation'),
            ('ORDERING', 'NUNIQ',
             'Pixel ordering scheme: RING, NESTED, or NUNIQ'),
            ('COORDSYS', 'C', 'Ecliptic, Galactic or Celestial (equatorial)'),
            ('MOCORDER', moc.uniq2order(m['UNIQ'].max()),
             'MOC resolution (best order)'),
            ('INDXSCHM', 'EXPLICIT', 'Indexing: IMPLICIT or EXPLICIT')
        ]
        # Ignore nest keyword argument if present
        m.meta.pop('nest', False)
    else:
        default_names = DEFAULT_NESTED_NAMES
        default_units = DEFAULT_NESTED_UNITS
        ordering = 'NESTED' if m.meta.pop('nest', False) else 'RING'
        extra_header = [
            ('PIXTYPE', 'HEALPIX', 'HEALPIX pixelisation'),
            ('ORDERING', ordering,
             'Pixel ordering scheme: RING, NESTED, or NUNIQ'),
            ('COORDSYS', 'C', 'Ecliptic, Galactic or Celestial (equatorial)'),
            ('NSIDE', ah.npix_to_nside(len(m)),
             'Resolution parameter of HEALPIX'),
            ('INDXSCHM', 'IMPLICIT', 'Indexing: IMPLICIT or EXPLICIT')
        ]

    for key, rows in itertools.groupby(FITS_META_MAPPING, lambda row: row[0]):
        try:
            value = m.meta.pop(key)
        except KeyError:
            pass
        else:
            for row in rows:
                _, fits_key, fits_comment, to_fits, _ = row
                if to_fits is not None:
                    extra_header.append(
                        (fits_key, to_fits(value), fits_comment))

    for default_name, default_unit in zip(default_names, default_units):
        try:
            col = m[default_name]
        except KeyError:
            pass
        else:
            if not col.unit:
                col.unit = default_unit

    log.debug('converting from Astropy table to FITS HDU list')
    hdu = fits.table_to_hdu(m)
    hdu.header.extend(extra_header)
    hdulist = fits.HDUList([fits.PrimaryHDU(), hdu])
    log.debug('saving')
    hdulist.writeto(filename, overwrite=True)
Example No. 14
def contour(m, levels, nest=False, degrees=False, simplify=True):
    """Calculate contours from a HEALPix dataset.

    Parameters
    ----------
    m : `numpy.ndarray`
        The HEALPix dataset.
    levels : list
        The list of contour values.
    nest : bool, default=False
        Indicates whether the input sky map is in nested rather than
        ring-indexed HEALPix coordinates (default: ring).
    degrees : bool, default=False
        Whether the contours are in degrees instead of radians.
    simplify : bool, default=True
        Whether to simplify the paths.

    Returns
    -------
    list
        A list with the same length as `levels`.
        Each item is a list of disjoint polygons, of which each item is a
        list of points, of which each is a list consisting of the right
        ascension and declination.

    Examples
    --------
    A very simple example sky map...

    >>> nside = 32
    >>> npix = ah.nside_to_npix(nside)
    >>> ra, dec = hp.pix2ang(nside, np.arange(npix), lonlat=True)
    >>> m = dec
    >>> contour(m, [10, 20, 30], degrees=True)
    [[[[..., ...], ...], ...], ...]

    """
    # Infrequently used import
    import networkx as nx

    # Determine HEALPix resolution.
    npix = len(m)
    nside = ah.npix_to_nside(npix)
    min_area = 0.4 * ah.nside_to_pixel_area(nside).to_value(u.sr)

    neighbors = hp.get_all_neighbours(nside, np.arange(npix), nest=nest).T

    # Loop over the requested contours.
    paths = []
    for level in levels:

        # Find credible region.
        indicator = (m >= level)

        # Find all faces that lie on the boundary.
        # This speeds up the doubly nested ``for`` loop below by allowing us to
        # skip the vast majority of faces that are on the interior or the
        # exterior of the contour.
        tovisit = np.flatnonzero(
            np.any(indicator.reshape(-1, 1) != indicator[neighbors[:, ::2]],
                   axis=1))

        # Construct a graph of the edges of the contour.
        graph = nx.Graph()
        face_pairs = set()
        for ipix1 in tovisit:
            neighborhood = neighbors[ipix1]
            for _ in range(4):
                neighborhood = np.roll(neighborhood, 2)
                ipix2 = neighborhood[4]

                # Skip this pair of faces if we have already examined it.
                new_face_pair = frozenset((ipix1, ipix2))
                if new_face_pair in face_pairs:
                    continue
                face_pairs.add(new_face_pair)

                # Determine if this pair of faces are on a boundary of the
                # credible level.
                if indicator[ipix1] == indicator[ipix2]:
                    continue

                # Add the common edge of this pair of faces.
                # Label each vertex with the set of faces that they share.
                graph.add_edge(frozenset((ipix1, *neighborhood[2:5])),
                               frozenset((ipix1, *neighborhood[4:7])))
        graph = nx.freeze(graph)

        # Find contours by detecting cycles in the graph.
        cycles = nx.cycle_basis(graph)

        # Construct the coordinates of the vertices by averaging the
        # coordinates of the connected faces.
        cycles = [[
            np.sum(hp.pix2vec(nside, [i for i in v if i != -1], nest=nest), 1)
            for v in cycle
        ] for cycle in cycles]

        # Simplify paths if requested.
        if simplify:
            cycles = [_simplify(cycle, min_area) for cycle in cycles]
            cycles = [cycle for cycle in cycles if len(cycle) > 2]

        # Convert to angles.
        cycles = [
            _vec2radec(cycle, degrees=degrees).tolist() for cycle in cycles
        ]

        # Add to output paths.
        paths.append([cycle + [cycle[0]] for cycle in cycles])

    return paths
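
# A hedged sketch: the return value is a list (one entry per level) of closed
# polygons, each a list of [ra, dec] vertices. Note that contour() imports
# networkx at call time and uses the module-private helpers _simplify and
# _vec2radec.
import astropy_healpix as ah
import healpy as hp
import numpy as np

nside = 32
npix = ah.nside_to_npix(nside)
_, dec = hp.pix2ang(nside, np.arange(npix), lonlat=True)
paths = contour(dec, [30, 60], degrees=True)
for level, level_paths in zip([30, 60], paths):
    for polygon in level_paths:
        ra_poly, dec_poly = np.transpose(polygon)
        print(level, len(ra_poly))  # vertex count of each closed polygon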
Example No. 15
def posterior_max(prob, nest=False):
    """Return the maximum a posteriori sky position as a SkyCoord."""
    npix = len(prob)
    nside = ah.npix_to_nside(npix)
    i = np.argmax(prob)
    return SkyCoord(
        *hp.pix2ang(nside, i, nest=nest, lonlat=True), unit=u.deg)
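
# A minimal sketch: with all of the probability in a single pixel,
# posterior_max returns that pixel's coordinates. The astropy imports below
# are the ones posterior_max itself relies on.
import astropy_healpix as ah
import healpy as hp
import numpy as np
from astropy import units as u
from astropy.coordinates import SkyCoord

nside = 64
prob = np.zeros(ah.nside_to_npix(nside))
prob[hp.ang2pix(nside, 30.0, -10.0, lonlat=True)] = 1.0
coord = posterior_max(prob)
print(coord.ra.deg, coord.dec.deg)  # close to (30, -10)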
Example No. 16
def find_ellipse(prob, cl=90, projection='ARC', nest=False):
    """For a HEALPix map, find an ellipse that contains a given probability.

    The orientation is defined as the angle of the semimajor axis
    counterclockwise from west on the plane of the sky. If you think of the
    semimajor distance as the width of the ellipse, then the orientation is the
    clockwise rotation relative to the image x-axis. Equivalently, the
    orientation is the position angle of the semi-minor axis.

    These conventions match the definitions used in DS9 region files [1]_ and
    Aladin drawing commands [2]_.

    Parameters
    ----------
    prob : np.ndarray, astropy.table.Table
        The HEALPix probability map, either as a full rank explicit array
        or as a multi-order map.
    cl : float
        The desired credible level (default: 90).
    projection : str, optional
        The WCS projection (default: 'ARC', or zenithal equidistant).
        For a list of possible values, see the Astropy documentation [3]_.
    nest : bool
        HEALPix pixel ordering (default: False, or ring ordering).

    Returns
    -------
    ra : float
        The ellipse center right ascension in degrees.
    dec : float
        The ellipse center declination in degrees.
    a : float
        The length of the semimajor axis in degrees.
    b : float
        The length of the semiminor axis in degrees.
    pa : float
        The orientation of the ellipse axis on the plane of the sky in degrees.
    area : float
        The area of the ellipse in square degrees.

    Notes
    -----
    The center of the ellipse is the median a posteriori sky position. The
    length and orientation of the semi-major and semi-minor axes are measured
    as follows:

    1. The sky map is transformed to a WCS projection that may be specified by
       the caller. The default projection is ``ARC`` (zenithal equidistant), in
       which radial distances are proportional to the physical angular
       separation from the center point.
    2. A 1-sigma ellipse is estimated by calculating the covariance matrix in
       the projected image plane using three rounds of sigma clipping to reject
       distant outlier points.
    3. The 1-sigma ellipse is inflated until it encloses an integrated
       probability of ``cl`` (default: 90%).

    The function returns a tuple of the right ascension, declination,
    semi-major distance, semi-minor distance, and orientation angle, all in
    degrees, plus the enclosed area in square degrees.

    References
    ----------
    .. [1] http://ds9.si.edu/doc/ref/region.html
    .. [2] http://aladin.u-strasbg.fr/java/AladinScriptManual.gml#draw
    .. [3] http://docs.astropy.org/en/stable/wcs/index.html#supported-projections

    Examples
    --------
    **Example 1**

    First, we need some imports.

    >>> from astropy.io import fits
    >>> from astropy.utils.data import download_file
    >>> from astropy.wcs import WCS
    >>> import healpy as hp
    >>> from reproject import reproject_from_healpix
    >>> import subprocess

    Next, we download the BAYESTAR sky map for GW170817 from the
    LIGO Document Control Center.

    >>> url = 'https://dcc.ligo.org/public/0146/G1701985/001/bayestar.fits.gz'  # doctest: +SKIP
    >>> filename = download_file(url, cache=True, show_progress=False)  # doctest: +SKIP
    >>> _, healpix_hdu = fits.open(filename)  # doctest: +SKIP
    >>> prob = hp.read_map(healpix_hdu, verbose=False)  # doctest: +SKIP

    Then, we calculate ellipse and write it to a DS9 region file.

    >>> ra, dec, a, b, pa, area = find_ellipse(prob)  # doctest: +SKIP
    >>> print(*np.around([ra, dec, a, b, pa, area], 5))  # doctest: +SKIP
    195.03732 -19.29358 8.66545 1.1793 63.61698 32.07665
    >>> s = 'fk5;ellipse({},{},{},{},{})'.format(ra, dec, a, b, pa)  # doctest: +SKIP
    >>> open('ds9.reg', 'w').write(s)  # doctest: +SKIP

    Then, we reproject a small patch of the HEALPix map, and save it to a file.

    >>> wcs = WCS()  # doctest: +SKIP
    >>> wcs.wcs.ctype = ['RA---ARC', 'DEC--ARC']  # doctest: +SKIP
    >>> wcs.wcs.crval = [ra, dec]  # doctest: +SKIP
    >>> wcs.wcs.crpix = [128, 128]  # doctest: +SKIP
    >>> wcs.wcs.cdelt = [-0.1, 0.1]  # doctest: +SKIP
    >>> img, _ = reproject_from_healpix(healpix_hdu, wcs, [256, 256])  # doctest: +SKIP
    >>> img_hdu = fits.ImageHDU(img, wcs.to_header())  # doctest: +SKIP
    >>> img_hdu.writeto('skymap.fits')  # doctest: +SKIP

    Now open the image and region file in DS9. You should find that the ellipse
    encloses the probability hot spot. You can load the sky map and region file
    from the command line:

    .. code-block:: sh

        $ ds9 skymap.fits -region ds9.reg

    Or you can do this manually:

        1. Open DS9.
        2. Open the sky map: select "File->Open..." and choose ``skymap.fits``
           from the dialog box.
        3. Open the region file: select "Regions->Load Regions..." and choose
           ``ds9.reg`` from the dialog box.

    Now open the image and region file in Aladin.

        1. Open Aladin.
        2. Open the sky map: select "File->Load Local File..." and choose
           ``skymap.fits`` from the dialog box.
        3. Open the sky map: select "File->Load Local File..." and choose
           ``ds9.reg`` from the dialog box.

    You can also compare the original HEALPix file with the ellipse in Aladin:

        1. Open Aladin.
        2. Open the HEALPix file by pasting the URL from the top of this
           example in the Command field at the top of the window and hitting
           return, or by selecting "File->Load Direct URL...", pasting the URL,
           and clicking "Submit."
        3. Open the sky map: select "File->Load Local File..." and choose
           ``ds9.reg`` from the dialog box.

    **Example 2**

    This example shows that we get approximately the same answer for GW170817
    if we read it in as a multi-order map.

    >>> from ..io import read_sky_map  # doctest: +SKIP
    >>> skymap_moc = read_sky_map(healpix_hdu, moc=True)  # doctest: +SKIP
    >>> ellipse = find_ellipse(skymap_moc)  # doctest: +SKIP
    >>> print(*np.around(ellipse, 5))  # doctest: +SKIP
    195.03709 -19.27589 8.67611 1.18167 63.60454 32.08015

    **Example 3**

    I'm not showing the `ra` or `pa` output from the examples below because
    the right ascension is arbitrary when dec=90° and the position angle is
    arbitrary when a=b; their arbitrary values may vary depending on your math
    library. Also, I add 0.0 to the outputs because on some platforms you tend
    to get values of dec or pa that get rounded to -0.0, which is within
    numerical precision but would break the doctests (see
    https://stackoverflow.com/questions/11010683).

    This is an example sky map that is uniform in sin(theta) out to a given
    radius in degrees. The 90% credible radius should be 0.9 * radius. (There
    will be deviations for small radius due to finite resolution.)

    >>> def make_uniform_in_sin_theta(radius, nside=512):
    ...     npix = ah.nside_to_npix(nside)
    ...     theta, phi = hp.pix2ang(nside, np.arange(npix))
    ...     theta_max = np.deg2rad(radius)
    ...     prob = np.where(theta <= theta_max, 1 / np.sin(theta), 0)
    ...     return prob / prob.sum()
    ...

    >>> prob = make_uniform_in_sin_theta(1)
    >>> ra, dec, a, b, pa, area = find_ellipse(prob)
    >>> dec, a, b, area  # doctest: +FLOAT_CMP
    (89.90862520480792, 0.8703361458208101, 0.8703357768874356, 2.3788811576269793)

    >>> prob = make_uniform_in_sin_theta(10)
    >>> ra, dec, a, b, pa, area = find_ellipse(prob)
    >>> dec, a, b, area  # doctest: +FLOAT_CMP
    (89.90827657529562, 9.024846562072119, 9.024842703023802, 255.11972196535515)

    >>> prob = make_uniform_in_sin_theta(120)
    >>> ra, dec, a, b, pa, area = find_ellipse(prob)
    >>> dec, a, b, area  # doctest: +FLOAT_CMP
    (90.0, 107.9745037610576, 107.97450376105758, 26988.70467497216)

    **Example 4**

    These are approximately Gaussian distributions.

    >>> from scipy import stats
    >>> def make_gaussian(mean, cov, nside=512):
    ...     npix = ah.nside_to_npix(nside)
    ...     xyz = np.transpose(hp.pix2vec(nside, np.arange(npix)))
    ...     dist = stats.multivariate_normal(mean, cov)
    ...     prob = dist.pdf(xyz)
    ...     return prob / prob.sum()
    ...

    This one is centered at RA=45°, Dec=0° and has a standard deviation of ~1°.

    >>> prob = make_gaussian(
    ...     [1/np.sqrt(2), 1/np.sqrt(2), 0],
    ...     np.square(np.deg2rad(1)))
    ...
    >>> find_ellipse(prob)  # doctest: +FLOAT_CMP
    (45.0, 0.0, 2.1424077148886744, 2.1420790721225518, 90.0, 14.467701995920123)

    This one is centered at RA=45°, Dec=0°, and is elongated in the north-south
    direction.

    >>> prob = make_gaussian(
    ...     [1/np.sqrt(2), 1/np.sqrt(2), 0],
    ...     np.diag(np.square(np.deg2rad([1, 1, 10]))))
    ...
    >>> find_ellipse(prob)  # doctest: +FLOAT_CMP
    (44.99999999999999, 0.0, 13.58768882719899, 2.0829846178241853, 90.0, 88.57796576937031)

    This one is centered at RA=0°, Dec=0°, and is elongated in the east-west
    direction.

    >>> prob = make_gaussian(
    ...     [1, 0, 0],
    ...     np.diag(np.square(np.deg2rad([1, 10, 1]))))
    ...
    >>> find_ellipse(prob)  # doctest: +FLOAT_CMP
    (0.0, 0.0, 13.583918022027149, 2.0823769912401433, 0.0, 88.54622940628761)

    This one is centered at RA=0°, Dec=0°, and has its long axis tilted about
    10° to the west of north.

    >>> prob = make_gaussian(
    ...     [1, 0, 0],
    ...     [[0.1, 0, 0],
    ...      [0, 0.1, -0.15],
    ...      [0, -0.15, 1]])
    ...
    >>> find_ellipse(prob)  # doctest: +FLOAT_CMP
    (0.0, 0.0, 64.7713312709293, 33.50754131182681, 80.78231196786838, 6372.344658663038)

    This one is centered at RA=0°, Dec=0°, and has its long axis tilted about
    10° to the east of north.

    >>> prob = make_gaussian(
    ...     [1, 0, 0],
    ...     [[0.1, 0, 0],
    ...      [0, 0.1, 0.15],
    ...      [0, 0.15, 1]])
    ...
    >>> find_ellipse(prob)  # doctest: +FLOAT_CMP
    (0.0, 0.0, 64.77133127093047, 33.50754131182745, 99.21768803213159, 6372.344658663096)

    This one is centered at RA=0°, Dec=0°, and has its long axis tilted about
    80° to the east of north.

    >>> prob = make_gaussian(
    ...     [1, 0, 0],
    ...     [[0.1, 0, 0],
    ...      [0, 1, 0.15],
    ...      [0, 0.15, 0.1]])
    ...
    >>> find_ellipse(prob)  # doctest: +FLOAT_CMP
    (0.0, 0.0, 64.7756448603915, 33.509863018519894, 170.78252287327365, 6372.425731592412)

    This one is centered at RA=0°, Dec=0°, and has its long axis tilted about
    80° to the west of north.

    >>> prob = make_gaussian(
    ...     [1, 0, 0],
    ...     [[0.1, 0, 0],
    ...      [0, 1, -0.15],
    ...      [0, -0.15, 0.1]])
    ...
    >>> find_ellipse(prob)  # doctest: +FLOAT_CMP
    (0.0, 0.0, 64.77564486039148, 33.50986301851987, 9.217477126726322, 6372.42573159241)

    """  # noqa: E501
    try:
        prob['UNIQ']
    except (IndexError, KeyError, ValueError):
        npix = len(prob)
        nside = ah.npix_to_nside(npix)
        ipix = range(npix)
        area = ah.nside_to_pixel_area(nside).to_value(u.deg**2)
    else:
        order, ipix = moc.uniq2nest(prob['UNIQ'])
        nside = 1 << order.astype(int)
        ipix = ipix.astype(int)
        area = ah.nside_to_pixel_area(nside).to_value(u.sr)
        prob = prob['PROBDENSITY'] * area
        area *= np.square(180 / np.pi)
        nest = True

    # Find median a posteriori sky position.
    xyz0 = [
        quantile(x, 0.5, weights=prob)
        for x in hp.pix2vec(nside, ipix, nest=nest)
    ]
    (ra, ), (dec, ) = hp.vec2ang(np.asarray(xyz0), lonlat=True)

    # Construct WCS with the specified projection
    # and centered on mean direction.
    w = WCS()
    w.wcs.crval = [ra, dec]
    w.wcs.ctype = ['RA---' + projection, 'DEC--' + projection]

    # Transform HEALPix to zenithal equidistant coordinates.
    xy = w.wcs_world2pix(
        np.transpose(hp.pix2ang(nside, ipix, nest=nest, lonlat=True)), 1)

    # Keep only values that were inside the projection.
    keep = np.logical_and.reduce(np.isfinite(xy), axis=1)
    xy = xy[keep]
    prob = prob[keep]
    if not np.isscalar(area):
        area = area[keep]

    # Find covariance matrix, performing three rounds of sigma-clipping
    # to reject outliers.
    keep = np.ones(len(xy), dtype=bool)
    for _ in range(3):
        c = np.cov(xy[keep], aweights=prob[keep], rowvar=False)
        nsigmas = np.sqrt(np.sum(xy.T * np.linalg.solve(c, xy.T), axis=0))
        keep &= (nsigmas < 3)

    # Find the number of sigma that enclose the cl% credible level.
    i = np.argsort(nsigmas)
    nsigmas = nsigmas[i]
    cls = np.cumsum(prob[i])
    if np.isscalar(area):
        careas = np.arange(1, len(i) + 1) * area
    else:
        careas = np.cumsum(area[i])
    nsigma = np.interp(1e-2 * cl, cls, nsigmas)
    area = np.interp(1e-2 * cl, cls, careas)

    # If the credible level is not within the projection,
    # then stop here and return all nans.
    if 1e-2 * cl > cls[-1]:
        return np.nan, np.nan, np.nan, np.nan, np.nan, np.nan

    # Find the eigendecomposition of the covariance matrix.
    w, v = np.linalg.eigh(c)

    # Find the semi-minor and semi-major axes.
    b, a = nsigma * np.sqrt(w)

    # Find the position angle.
    pa = np.rad2deg(np.arctan2(*v[0]))

    # An ellipse is symmetric under rotations of 180°.
    # Return the smallest possible positive position angle.
    pa %= 180

    # Done!
    return ra, dec, a, b, pa, area