def _reconstruct_nested_breadthfirst(m, extra):
    m = np.asarray(m)
    max_npix = len(m)
    max_nside = hp.npix2nside(max_npix)
    max_order = hp.nside2order(max_nside)
    seen = np.zeros(max_npix, dtype=bool)

    for order in range(max_order + 1):
        nside = hp.order2nside(order)
        npix = hp.nside2npix(nside)
        skip = max_npix // npix
        if skip > 1:
            b = m.reshape(-1, skip)
            a = b[:, 0].reshape(-1, 1)
            b = b[:, 1:]
            aseen = seen.reshape(-1, skip)
            eq = ((a == b) | ((a != a) & (b != b))).all(1) & (~aseen).all(1)
        else:
            eq = ~seen
        for ipix in np.flatnonzero(eq):
            ipix0 = ipix * skip
            ipix1 = (ipix + 1) * skip
            seen[ipix0:ipix1] = True
            if extra:
                yield _HEALPixTreeVisitExtra(
                    nside, max_nside, ipix, ipix0, ipix1, m[ipix0])
            else:
                yield _HEALPixTreeVisit(nside, ipix)
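For reference, a minimal standalone sketch (using only healpy) of the nside/order/npix relationships that the breadth-first traversal above relies on; the numbers are illustrative only:

import healpy as hp

# nside doubles with each order; npix grows by a factor of four.
for order in range(4):
    nside = hp.order2nside(order)   # nside == 2**order
    npix = hp.nside2npix(nside)     # npix == 12 * nside**2
    print(order, nside, npix)

# In NESTED ordering the descendants of coarse pixel `ipix` occupy the
# contiguous range [ipix * skip, (ipix + 1) * skip) at the finest
# resolution, where skip = max_npix // npix; that is why the traversal
# can test m[ipix0:ipix1] as one block and mark it in `seen`.
skip = hp.nside2npix(8) // hp.nside2npix(2)
print(skip)  # 16 fine (nside=8) pixels per coarse (nside=2) pixel
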
Example #2
def main(args=None):
    args = parser().parse_args(args)

    import logging
    import warnings
    from astropy.io import fits
    import healpy as hp
    from ..io import read_sky_map, write_sky_map
    from ..bayestar import rasterize

    log = logging.getLogger()

    if args.nside is None:
        order = None
    else:
        order = hp.nside2order(args.nside)

    log.info('reading FITS file %s', args.input.name)
    hdus = fits.open(args.input)
    ordering = hdus[1].header['ORDERING']
    expected_ordering = 'NUNIQ'
    if ordering != expected_ordering:
        msg = 'Expected the FITS file {} to have ordering {}, but it is {}'
        warnings.warn(msg.format(args.input.name, expected_ordering, ordering))
    log.debug('converting original FITS file to Astropy table')
    table = read_sky_map(hdus, moc=True)
    log.debug('flattening HEALPix tree')
    table = rasterize(table, order=order)
    log.info('writing FITS file %s', args.output.name)
    write_sky_map(args.output.name, table, nest=True)
    log.debug('done')
Example #3
def _reconstruct_nested_breadthfirst(m, extra):
    max_npix = len(m)
    max_nside = hp.npix2nside(max_npix)
    max_order = hp.nside2order(max_nside)
    seen = np.zeros(max_npix, dtype=bool)

    for order in range(max_order + 1):
        nside = hp.order2nside(order)
        npix = hp.nside2npix(nside)
        skip = max_npix // npix
        if skip > 1:
            b = m.reshape(-1, skip)
            a = b[:, 0].reshape(-1, 1)
            b = b[:, 1:]
            aseen = seen.reshape(-1, skip)
            eq = ((a == b) | ((a != a) & (b != b))).all(1) & (~aseen).all(1)
        else:
            eq = ~seen
        for ipix in np.flatnonzero(eq):
            ipix0 = ipix * skip
            ipix1 = (ipix + 1) * skip
            seen[ipix0:ipix1] = True
            if extra:
                yield _HEALPixTreeVisitExtra(
                    nside, max_nside, ipix, ipix0, ipix1, m[ipix0])
            else:
                yield _HEALPixTreeVisit(nside, ipix)
Example #4
def update_gwemoptconfig(grb_dic, conf_dic, params):
    """
    Update parameters for GRB alert on gwemopt

    :param grb_dic: dictionary with the GRB alert information (telescope,
        observation date and sky map)
    :param conf_dic: configuration dictionary
    :param params: dictionary to be used to start gwemopt and that
        will be updated/completed
    :return: updated params dictionary
    """

    # For Fermi GRB alerts, disable the 3D (distance) treatment
    if grb_dic["teles"] == "FERMI":
        params["do3D"] = False
        # parsing skymap
        params["doDatabase"] = True
        params["dateobs"] = grb_dic["dateobs"]
        order = hp.nside2order(grb_dic["skymap"]["nside"])
        t = rasterize(grb_dic["skymap"]["skymap"], order)
        result = t['PROB']
        flat = hp.reorder(result, 'NESTED', 'RING')
        params['map_struct'] = {}
        params['map_struct']['prob'] = flat
    if params["do3D"]:
        params["DISTMEAN"] = grb_dic["skymap"]["distmu"]
        params["DISTSTD"] = grb_dic["skymap"]["distsigma"]

        # Use galaxies to compute the grade, both for tiling and galaxy
        # targeting, only when dist_mean + dist_std < 400Mpc
        if params["DISTMEAN"] + params["DISTSTD"] <= conf_dic["Dist_cut"]:
            params["doUseCatalog"] = True
            params["doCatalog"] = True
            params["writeCatalog"] = True

    return params
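The Fermi branch above boils down to flattening a multi-order (MOC) sky map to a fixed resolution and re-ordering it. A minimal sketch of that step in isolation, using the same rasterize as in the earlier examples; here moc_table stands in for a table obtained with read_sky_map(..., moc=True), and nside=512 is an arbitrary target resolution:

import healpy as hp

# Flatten the multi-order sky map to nside=512 (NESTED), then convert to
# RING ordering, mirroring what the function stores in
# params['map_struct']['prob'].
order = hp.nside2order(512)   # nside=512 corresponds to order 9
flat_nested = rasterize(moc_table, order)['PROB']
flat_ring = hp.reorder(flat_nested, 'NESTED', 'RING')
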
Example #5
def adaptive_healpix_histogram(theta,
                               phi,
                               max_samples_per_pixel,
                               nside=-1,
                               max_nside=-1,
                               nest=False):
    """Adaptively histogram the posterior samples represented by the
    (theta, phi) points using a recursively subdivided HEALPix tree. Nodes are
    subdivided until each leaf contains no more than max_samples_per_pixel
    samples. Finally, the tree is flattened to a fixed-resolution HEALPix image
    with a resolution appropriate for the depth of the tree. If nside is
    specified, the result is resampled to another desired HEALPix resolution.
    """
    # Calculate pixel index of every sample, at the maximum 64-bit resolution.
    #
    # At this resolution, each pixel is only 0.2 mas across; we'll use the
    # 64-bit pixel indices as a proxy for the true sample coordinates so that
    # we don't have to do any trigonometry (aside from the initial hp.ang2pix
    # call).
    ipix = hp.ang2pix(HEALPIX_MACHINE_NSIDE, theta, phi, nest=True)

    # Build tree structure.
    if nside == -1 and max_nside == -1:
        max_order = HEALPIX_MACHINE_ORDER
    elif nside == -1:
        max_order = hp.nside2order(max_nside)
    elif max_nside == -1:
        max_order = hp.nside2order(nside)
    else:
        max_order = hp.nside2order(min(nside, max_nside))
    tree = HEALPixTree(ipix, max_samples_per_pixel, max_order)

    # Compute a flattened bitmap representation of the tree.
    p = tree.flat_bitmap

    # If requested, resample the tree to the output resolution.
    if nside != -1:
        p = hp.ud_grade(p, nside, order_in='NESTED', order_out='NESTED')

    # Normalize.
    p /= np.sum(p)

    if not nest:
        p = hp.reorder(p, n2r=True)

    # Done!
    return p
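A minimal usage sketch for adaptive_healpix_histogram; the samples are synthetic, and the module-level pieces the function depends on (HEALPIX_MACHINE_NSIDE, HEALPIX_MACHINE_ORDER, HEALPixTree) are assumed to be provided by the surrounding module:

import numpy as np

# Synthetic, isotropically distributed posterior samples:
# colatitude theta in [0, pi], longitude phi in [0, 2*pi).
rng = np.random.default_rng(0)
theta = np.arccos(rng.uniform(-1.0, 1.0, size=10000))
phi = rng.uniform(0.0, 2.0 * np.pi, size=10000)

# Adaptively bin the samples, then resample to a fixed nside=64 map in
# RING ordering; the returned pixel values are normalized to sum to 1.
p = adaptive_healpix_histogram(theta, phi, max_samples_per_pixel=30,
                               nside=64, nest=False)
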
Example #6
def adaptive_healpix_histogram(
        theta, phi, max_samples_per_pixel, nside=-1, max_nside=-1, nest=False):
    """Adaptively histogram the posterior samples represented by the
    (theta, phi) points using a recursively subdivided HEALPix tree. Nodes are
    subdivided until each leaf contains no more than max_samples_per_pixel
    samples. Finally, the tree is flattened to a fixed-resolution HEALPix image
    with a resolution appropriate for the depth of the tree. If nside is
    specified, the result is resampled to another desired HEALPix resolution.
    """
    # Calculate pixel index of every sample, at the maximum 64-bit resolution.
    #
    # At this resolution, each pixel is only 0.2 mas across; we'll use the
    # 64-bit pixel indices as a proxy for the true sample coordinates so that
    # we don't have to do any trigonometry (aside from the initial hp.ang2pix
    # call).
    #
    # FIXME: Cast to uint64 needed because Healpy returns signed indices.
    ipix = hp.ang2pix(
        HEALPIX_MACHINE_NSIDE, theta, phi, nest=True).astype(np.uint64)

    # Build tree structure.
    if nside == -1 and max_nside == -1:
        max_order = HEALPIX_MACHINE_ORDER
    elif nside == -1:
        max_order = hp.nside2order(max_nside)
    elif max_nside == -1:
        max_order = hp.nside2order(nside)
    else:
        max_order = hp.nside2order(min(nside, max_nside))
    tree = HEALPixTree(ipix, max_samples_per_pixel, max_order)

    # Compute a flattened bitmap representation of the tree.
    p = tree.flat_bitmap

    # If requested, resample the tree to the output resolution.
    if nside != -1:
        p = hp.ud_grade(p, nside, order_in='NESTED', order_out='NESTED')

    # Normalize.
    p /= np.sum(p)

    if not nest:
        p = hp.reorder(p, n2r=True)

    # Done!
    return p
Example #7
 def flat(self):
     """Get flat resolution HEALPix dataset, probability density and
     distance."""
     if self.is_3d:
         order = hp.nside2order(Localization.nside)
         t = rasterize(self.table, order)
         result = t['PROB'], t['DISTMU'], t['DISTSIGMA'], t['DISTNORM']
         return hp.reorder(result, 'NESTED', 'RING')
     else:
         return self.flat_2d,
Example #8
def hp_split(img, order, nest=True):
    """Split the data of different part of the sphere.

    Return the splitted data and some possible index on the sphere.
    """
    npix = len(img)
    nside = hp.npix2nside(npix)
    if hp.nside2order(nside) < order:
        raise ValueError('Order not compatible with data.')
    if not nest:
        raise NotImplementedError('Implement the change of coordinate.')
    nsample = 12 * order**2
    return img.reshape([nsample, npix // nsample])
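A minimal usage sketch for hp_split with a synthetic map. Note that `order` here appears to play the role of the partition nside rather than a HEALPix order, since the number of patches is computed as 12 * order**2:

import healpy as hp
import numpy as np

# A NESTED map at nside=16 (12 * 16**2 = 3072 pixels).
img = np.arange(hp.nside2npix(16), dtype=float)

# With order=2 the map is cut into 12 * 2**2 = 48 patches of
# 3072 // 48 = 64 pixels each; in NESTED ordering every patch is a
# contiguous block of pixels covering one coarse pixel.
patches = hp_split(img, order=2)
print(patches.shape)  # (48, 64)
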
Example #9
 def flat_2d(self):
     """Get flat resolution HEALPix dataset, probability density only."""
     order = hp.nside2order(Localization.nside)
     result = rasterize(self.table_2d, order)['PROB']
     return hp.reorder(result, 'NESTED', 'RING')
Example #10
 def order(self):
     """Return the order parameter."""
     return healpy.nside2order(self._nside)
Example #11
def healpix_hist(input_df, NSIDE=64, groupby=[],
                 agg={"*": "count"}, returnDf=False):
    from pyspark.sql.functions import floor as FLOOR, col as COL, lit, shiftRight

    order0 = 12
    order  = hp.nside2order(NSIDE)
    shr    = 2*(order0 - order)

    # construct query
    df = input_df.withColumn('hpix__', shiftRight('hpix12', shr))
    gbcols = ('hpix__', )
    for axspec in groupby:
        if not isinstance(axspec, str):
            (col, c0, c1, dc) = axspec
            df = ( df
                .where((lit(c0) < COL(col)) & (COL(col) < lit(c1)))
                .withColumn(col + '_bin__', FLOOR((COL(col) - lit(c0)) / lit(dc)) * lit(dc) + lit(c0) )
                 )
            gbcols += ( col + '_bin__', )
        else:
            gbcols += ( axspec, )
    df = df.groupBy(*gbcols)

    # execute aggregation
    df = df.agg(agg)

    # fetch result
    df = df.toPandas()
    if returnDf:
        return df

    # repack the result into maps
    # This results line is slightly dangerous, because some aggregate functions are purely aliases.
    # E.g., mean(x) gets returned as a column avg(x).
    results = [ f"{v}({k})" if k != "*" else f"{v}(1)" for k, v in agg.items() ]    # Result columns
    def _create_map(df):
        maps = dict()
        for val in results:
            map_ = np.zeros(hp.nside2npix(NSIDE))
            # I think this line throws an error if there are no rows in the result
            map_[df.hpix__.values] = df[val].values 
            maps[val] = [ map_ ]
        return pd.DataFrame(data=maps)

    idxcols = list(gbcols[1:])
    if len(idxcols) == 0:
        ret = _create_map(df)
        assert(len(ret) == 1)
        if not returnDf:
            # convert to tuple, or scalar
            ret = tuple(ret[name].values[0] for name in results)
            if len(ret) == 1:
                ret = ret[0]
    else:
        ret = df.groupby(idxcols).apply(_create_map)
        ret.index = ret.index.droplevel(-1)
        ret.index.rename([ name.split("_bin__")[0] for name in ret.index.names ], inplace=True)
        if "count(1)" in ret:
                    ret = ret.rename(columns={'count(1)': 'count'})
        if not returnDf:
            if len(ret.columns) == 1:
                ret = ret.iloc[:, 0]
    return ret
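A minimal usage sketch for healpix_hist. It assumes `catalog` is a Spark DataFrame that already carries an 'hpix12' column, which the shift by 2*(order0 - order) bits suggests is a NESTED pixel index at order 12 (nside 4096): shifting a NESTED index right by 2*k bits yields its parent pixel k orders up, because every parent has exactly 4**k descendants. The magnitude column 'gmag' is hypothetical:

# One count map per 0.5-mag bin between g = 14 and g = 20.
maps = healpix_hist(catalog, NSIDE=64,
                    groupby=[("gmag", 14.0, 20.0, 0.5)],
                    agg={"*": "count"})

# `maps` is a pandas Series indexed by the left edge of each magnitude bin;
# every entry is a full-sky array of length 12 * 64**2 = 49152 holding the
# per-pixel counts at NSIDE=64.
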
Example #12
 def order(self):
     """Return the order parameter."""
     return healpy.nside2order(self._nside)
Example #13
def create_veto_mask(database,
                     nside=32768,
                     moc_filename=None,
                     overwrite=False,
                     debug_limit=None,
                     table='gaia_dr2_source',
                     ra_column='ra',
                     dec_column='dec',
                     mag_column='phot_g_mean_mag',
                     mag_threshold=12.0,
                     min_separation=15.0,
                     param_a=0.15,
                     param_b=1.5):
    """Generate a list of healpixels that must be avoided because
    they lie within the near-zones of bright stars (and/or galaxies TBD).

    I have a hunch that generating this list once and then testing skies
    against it will be more efficient (and reliable) than checking candidate
    skies against a list of stars. This avoids potential pitfalls of nearest
    neighbour method (e.g. when second nearest neighbour is brighter than
    nearest neighbour).

    This doesn't take too long to run, so the result does not need to be preserved
    long term. However, writing out a MOC file is a convenient way to visualise
    what has been done.

    Parameters
    ----------
    database : ~sdssdb.connection.PeeweeDatabaseConnection
        A valid database connection.
    nside : int
        HEALPix resolution of the returned healpix pixel list
    moc_filename : str
        Path to the MOC file to write (or None).
    overwrite: bool
        Whether to clobber the MOC file
    debug_limit: int
        Max number of stars to return in database query - debug purposes only
    table : str
        The name of the database table to query.
    ra_column : str
        The name of the column in ``table`` that contains the Right Ascension
        coordinates, in degrees.
    dec_column : str
        The name of the column in ``table`` that contains the Declination
        coordinates, in degrees.
    mag_column : str
        The name of the column in ``table`` with the magnitude to be used to
        scale ``min_separation``.
    mag_threshold : float
        Only stars brighter than this magnitude are included in the mask.
    min_separation : float
        The minimum exclusion radius around each masked star, in arcsec.
    param_a : float
        A parameter that controls how the exclusion radius scales with
        magnitude.
    param_b : float
        A parameter that controls how the exclusion radius scales with
        magnitude.

    Returns
    -------
    mask : ~numpy.ndarray
        A (numpy) array of healpixel indices (resolution ``nside``) that
        fall within the mask.

    """

    as2rad = numpy.pi / (180.0 * 3600.0)
    hpx_order = healpy.nside2order(nside)
    pixarea = healpy.nside2pixarea(nside)

    # get the list of stars brighter than mag_threshold from the database

    query = (f'SELECT {ra_column},{dec_column},{mag_column} from '
             f'{table} WHERE {mag_column} < {mag_threshold} AND '
             f'{ra_column} IS NOT NULL AND {dec_column} IS NOT NULL')
    if debug_limit is not None:
        query = query + f' LIMIT {debug_limit}'

    targets = pandas.read_sql(query, database)
    print(f'Working on {len(targets):,} bright stars '
          f'({mag_column} < {mag_threshold}) from {table}')

    # compute coords on unit sphere
    vector = healpy.pixelfunc.ang2vec(targets[ra_column],
                                      targets[dec_column],
                                      lonlat=True)

    # compute mag-dependent exclusion radii in arcsec, then convert to radians
    corr = numpy.power(mag_threshold - targets[mag_column], param_b) / param_a
    radius = as2rad * (min_separation + corr)

    ipix_list = []
    for v, r in zip(vector, radius):
        i = healpy.query_disc(nside,
                              vec=v,
                              radius=r,
                              inclusive=True,
                              fact=4,
                              nest=True)
        if len(i) > 0:
            ipix_list.extend(list(i))

    # we only want one copy of each masked pixel:
    ipix = numpy.unique(ipix_list)
    npix = len(ipix)
    print(f"Result: {npix:,} masked pixels (NSIDE={nside}), "
          f"area={npix*pixarea:.4f} sqdeg")

    if moc_filename is not None:
        m = MOC.from_healpix_cells(ipix=ipix,
                                   depth=numpy.repeat(hpx_order, len(ipix)))
        m.write(moc_filename, format='fits', overwrite=overwrite)

    return ipix
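A minimal usage sketch for create_veto_mask, assuming `database` is an open sdssdb connection as described in the docstring; the output filename and candidate coordinates are hypothetical. With the defaults (mag_threshold=12, param_a=0.15, param_b=1.5), a G=6 star is excluded out to roughly 15 + (12 - 6)**1.5 / 0.15 ≈ 113 arcsec, while a star just below the threshold keeps the 15 arcsec floor:

import healpy
import numpy

nside = 32768
mask = create_veto_mask(database, nside=nside,
                        moc_filename='bright_star_mask.fits',  # hypothetical
                        overwrite=True)

# Veto candidate sky positions that land on a masked pixel; the mask holds
# NESTED pixel indices (query_disc was called with nest=True).
cand_ra = numpy.array([150.1, 210.5])    # degrees
cand_dec = numpy.array([2.2, -11.3])
cand_pix = healpy.ang2pix(nside, cand_ra, cand_dec, lonlat=True, nest=True)
is_vetoed = numpy.isin(cand_pix, mask)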