Example #1
    def _finalize_targets(objects, cmx_target, priority_shift):
        # -for commissioning, all target classes are encoded in the single
        # -cmx_target bitmask, so we can filter just on cmx_target != 0
        keep = (cmx_target != 0)
        objects = objects[keep]
        cmx_target = cmx_target[keep]
        priority_shift = priority_shift[keep]

        # -Add *_target mask columns
        # ADM note that only cmx_target is defined for commissioning
        # ADM so just pass that around
        targets = finalize(objects,
                           cmx_target,
                           cmx_target,
                           cmx_target,
                           survey='cmx')
        # ADM shift the priorities of targets with functional priorities.
        targets["PRIORITY_INIT"] += priority_shift

        return targets
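
# A standalone sketch (toy data only, not part of the module above) of the
# filtering pattern used in _finalize_targets(): rows whose bit-packed target
# mask is zero carry no target classes, so every parallel array is trimmed
# with the same boolean mask before finalize() is called.
import numpy as np

objects = np.zeros(4, dtype=[('RA', '>f8'), ('DEC', '>f8')])
cmx_target = np.array([0, 3, 0, 1], dtype='>i8')        # bit-packed classes
priority_shift = np.array([0, -1, 0, 2], dtype='>i2')

keep = (cmx_target != 0)                                # drop untargeted rows
objects, cmx_target, priority_shift = (objects[keep], cmx_target[keep],
                                       priority_shift[keep])
print(len(objects), cmx_target, priority_shift)         # 2 [3 1] [-1  2]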
Example #2
        hdr.delete("CONTINUE")

        fitsio.write('t/' + basename(filepath),
                     data[keep],
                     header=hdr,
                     clobber=True)

        print('made sweeps file for range {}...t={:.2f}s'.format(
            radec,
            time() - start))

    # ADM only need to write out one set of targets. So fine outside of loop.
    # ADM create a targets file for testing QA (main survey and commissioning)
    # ADM we get more test coverage if one file has > 1000 targets.
    many = yes[:1001]
    targets = finalize(data[many], desi_target[many], bgs_target[many],
                       mws_target[many])
    cmx_targets = finalize(data[keep],
                           desi_target[keep],
                           bgs_target[keep],
                           mws_target[keep],
                           survey='cmx')
    # ADM remove some columns from the target file that aren't needed for
    # ADM testing. It's a big file.
    needtargs = np.empty(len(many),
                         dtype=[('RA', '>f8'), ('DEC', '>f8'),
                                ('RELEASE', '>i2'), ('FLUX_G', '>f4'),
                                ('FLUX_R', '>f4'), ('FLUX_Z', '>f4'),
                                ('FLUX_W1', '>f4'), ('FLUX_W2', '>f4'),
                                ('MW_TRANSMISSION_G', '>f4'),
                                ('MW_TRANSMISSION_R', '>f4'),
                                ('MW_TRANSMISSION_Z', '>f4'),
Example #3
def supplement_skies(nskiespersqdeg=None,
                     numproc=16,
                     gaiadir=None,
                     mindec=-30.,
                     mingalb=10.,
                     radius=2.,
                     minobjid=0):
    """Generate supplemental sky locations using Gaia-G-band avoidance.

    Parameters
    ----------
    nskiespersqdeg : :class:`float`, optional
        The minimum DENSITY of sky fibers to generate. Defaults to
        reading from :func:`~desimodel.io` with a margin of 4x.
    numproc : :class:`int`, optional, defaults to 16
        The number of processes over which to parallelize.
    gaiadir : :class:`str`, optional, defaults to $GAIA_DIR
        The GAIA_DIR environment variable is set to this directory.
        If None is passed, then it's assumed to already exist.
    mindec : :class:`float`, optional, defaults to -30
        Minimum declination (degrees) to include for output sky locations.
    mingalb : :class:`float`, optional, defaults to 10
        Closest latitude to Galactic plane for output sky locations
        (e.g. send 10 to limit to areas beyond -10 deg <= b < 10 deg).
    radius : :class:`float`, optional, defaults to 2
        Radius at which to avoid (all) Gaia sources (arcseconds).
    minobjid : :class:`int`, optional, defaults to 0
        The minimum OBJID to start counting from in a brick. Used
        to make sure supplemental skies have different OBJIDs from
        regular skies.

    Returns
    -------
    :class:`~numpy.ndarray`
        a structured array of supplemental sky positions in the DESI sky
        target format within the passed `mindec` and `mingalb` limits.

    Notes
    -----
        - The environment variable $GAIA_DIR must be set, or `gaiadir`
          must be passed.
    """
    log.info("running on {} processors".format(numproc))

    # ADM if the GAIA directory was passed, set it.
    if gaiadir is not None:
        os.environ["GAIA_DIR"] = gaiadir

    # ADM if needed, determine the density of sky fibers to generate.
    if nskiespersqdeg is None:
        nskiespersqdeg = density_of_sky_fibers(margin=4)

    # ADM determine the HEALPixel nside of the standard Gaia files.
    anyfiles = find_gaia_files([0, 0], radec=True)
    hdr = fitsio.read_header(anyfiles[0], "GAIAHPX")
    nside = hdr["HPXNSIDE"]

    # ADM create a set of random locations accounting for mindec.
    log.info("Generating supplemental sky locations at Dec > {}o...t={:.1f}s".
             format(mindec,
                    time() - start))
    from desitarget.randoms import randoms_in_a_brick_from_edges
    ras, decs = randoms_in_a_brick_from_edges(0.,
                                              360.,
                                              mindec,
                                              90.,
                                              density=nskiespersqdeg,
                                              wrap=False)

    # ADM limit randoms by mingalb.
    log.info(
        "Generated {} sky locations. Limiting to |b| > {}o...t={:.1f}s".format(
            len(ras), mingalb,
            time() - start))
    bnorth = is_in_gal_box([ras, decs], [0, 360, mingalb, 90], radec=True)
    bsouth = is_in_gal_box([ras, decs], [0, 360, -90, -mingalb], radec=True)
    ras, decs = ras[bnorth | bsouth], decs[bnorth | bsouth]

    # ADM find HEALPixels for the random points.
    log.info(
        "Cut to {} sky locations. Finding their HEALPixels...t={:.1f}s".format(
            len(ras),
            time() - start))
    theta, phi = np.radians(90 - decs), np.radians(ras)
    pixels = hp.ang2pix(nside, theta, phi, nest=True)
    upixels = np.unique(pixels)
    npixels = len(upixels)
    log.info("Running across {} HEALPixels.".format(npixels))

    # ADM parallelize across pixels. The function to run on every pixel.
    def _get_supp(pix):
        """wrapper on get_supp_skies() given a HEALPixel"""
        ii = (pixels == pix)
        return get_supp_skies(ras[ii], decs[ii], radius=radius)

    # ADM this is just to count pixels in _update_status.
    npix = np.zeros((), dtype='i8')
    t0 = time()

    def _update_status(result):
        """wrapper function for the critical reduction operation,
        that occurs on the main parallel process"""
        if npix % 500 == 0 and npix > 0:
            rate = npix / (time() - t0)
            log.info('{}/{} HEALPixels; {:.1f} pixels/sec'.format(
                npix, npixels, rate))
        npix[...] += 1  # this is an in-place modification.
        return result

    # - Parallel process across the unique pixels.
    if numproc > 1:
        pool = sharedmem.MapReduce(np=numproc)
        with pool:
            supp = pool.map(_get_supp, upixels, reduce=_update_status)
    else:
        supp = []
        for upix in upixels:
            supp.append(_update_status(_get_supp(upix)))

    # ADM Concatenate the parallelized results into one rec array.
    supp = np.concatenate(supp)

    # ADM build the OBJIDs from the number of sources per brick.
    # ADM the for loop doesn't seem the smartest way, but it is O(n).
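    # ADM (a vectorized alternative is sketched after this function).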
    log.info("Begin assigning OBJIDs to bricks...t={:.1f}s".format(time() -
                                                                   start))
    brxid = supp["BRICKID"]
    # ADM start each brick counting from minobjid.
    cntr = np.zeros(np.max(brxid) + 1, dtype=int) + minobjid
    objid = []
    for ibrx in brxid:
        cntr[ibrx] += 1
        objid.append(cntr[ibrx])
    # ADM ensure the number of sky positions that were generated doesn't exceed
    # ADM the largest possible OBJID (which is unlikely).
    if np.any(cntr > 2**targetid_mask.OBJID.nbits):
        log.fatal(
            'OBJID counter reached {} in brick with BRICKID {}, but OBJID cannot exceed {}'
            .format(np.max(cntr), np.argmax(cntr), 2**targetid_mask.OBJID.nbits))
        raise ValueError
    supp["OBJID"] = np.array(objid)
    log.info("Assigned OBJIDs to bricks...t={:.1f}s".format(time() - start))

    # ADM add the TARGETID, DESITARGET bits etc.
    nskies = len(supp)
    desi_target = np.zeros(nskies, dtype='>i8')
    desi_target |= desi_mask.SKY
    desi_target |= desi_mask.SUPP_SKY
    dum = np.zeros_like(desi_target)
    supp = finalize(supp, desi_target, dum, dum, sky=1)

    log.info('Done...t={:.1f}s'.format(time() - start))

    return supp
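
# A standalone sketch (toy BRICKIDs only, not part of the module above) of a
# vectorized alternative to the per-brick OBJID counting loop in
# supplement_skies(): sort by BRICKID, number each source by its position
# within its brick's run of rows, then undo the sort.
import numpy as np

brxid = np.array([7, 3, 7, 7, 3, 5])          # hypothetical BRICKIDs
minobjid = 0

order = np.argsort(brxid, kind="stable")      # group identical BRICKIDs
sorted_brx = brxid[order]
# index at which each brick's run of rows starts in the sorted array.
first = np.concatenate(([0], np.flatnonzero(np.diff(sorted_brx)) + 1))
runlen = np.diff(np.append(first, len(sorted_brx)))
starts = np.repeat(first, runlen)
within = np.arange(len(sorted_brx)) - starts  # 0, 1, 2, ... within each brick
objid = np.empty_like(brxid)
objid[order] = minobjid + 1 + within          # counting starts at minobjid + 1
print(objid)                                  # [1 1 2 3 2 1], as the loop gives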
Example #4
def make_skies_for_a_brick(survey,
                           brickname,
                           nskiespersqdeg=None,
                           bands=['g', 'r', 'z'],
                           apertures_arcsec=[0.75],
                           write=False):
    """Generate skies for one brick in the typical format for DESI sky targets.

    Parameters
    ----------
    survey : :class:`object`
        `LegacySurveyData` object for a given Data Release of the Legacy Surveys; see
        :class:`~desitarget.skyutilities.legacypipe.util.LegacySurveyData` for details.
    brickname : :class:`str`
        Name of the brick in which to generate sky locations.
    nskiespersqdeg : :class:`float`, optional
        The minimum DENSITY of sky fibers to generate. Defaults to reading from
        :func:`~desimodel.io` with a margin of 4x.
    bands : :class:`list`, optional, defaults to ['g', 'r', 'z']
        List of bands to be used to define good sky locations.
    apertures_arcsec : :class:`list`, optional, defaults to [0.75]
        Radii in arcsec of apertures for which to derive flux at a sky location.
    write : :class:`boolean`, defaults to False
        If `True`, write the skyfibers object (which is in the format of the output
        from :func:`sky_fibers_for_brick()`) to file. The file name is derived from
        the input `survey` object and is in the form:
        `%(survey.survey_dir)/metrics/%(brick).3s/skies-%(brick)s.fits.gz`
        which is returned by `survey.find_file('skies')`.

    Returns
    -------
    :class:`~numpy.ndarray`
        a structured array of sky positions in the DESI sky target format for a brick.

    Notes
    -----
    The code generates unique OBJIDs based on an integer counter for the number of
    objects (objs) passed. It will therefore fail if the number of objs exceeds the
    largest value (2**nbits) that fits in the bits reserved for OBJID in
    `desitarget.targetmask`.
    """
    # ADM this is only intended to work on one brick, so die if a larger array is passed
    # ADM needs a hack on string type as Python 2 only considered bytes to be type str.
    stringy = str
    if sys.version_info[0] == 2:
        # ADM if this is Python 2, redefine the string type.
        stringy = basestring
    if not isinstance(brickname, stringy):
        log.fatal("Only one brick can be passed at a time!")
        raise ValueError

    # ADM if needed, determine the minimum density of sky fibers to generate.
    if nskiespersqdeg is None:
        nskiespersqdeg = density_of_sky_fibers(margin=4)

    # ADM the hard-coded size of a DESI brick expressed as an area.
    # ADM This slightly overestimates the area of any real brick, since
    # ADM only bricks at the equator approach the full 0.25 x 0.25 deg size.
    area = 0.25 * 0.25

    # ADM the number of sky fibers to be generated. Must be a square number.
    nskiesfloat = area * nskiespersqdeg
    nskies = (np.sqrt(nskiesfloat).astype('int16') + 1)**2
    # log.info('Generating {} sky positions in brick {}...t = {:.1f}s'
    #         .format(nskies,brickname,time()-start))

    # ADM generate sky fiber information for this brick name.
    skytable = sky_fibers_for_brick(survey,
                                    brickname,
                                    nskies=nskies,
                                    bands=bands,
                                    apertures_arcsec=apertures_arcsec)
    # ADM if the blob file doesn't exist, skip it.
    if skytable is None:
        return None

    # ADM it's possible that a gridding could generate an unexpected
    # ADM number of sky fibers, so reset nskies based on the output.
    nskies = len(skytable)

    # ADM ensure the number of sky positions that were generated doesn't exceed
    # ADM the largest possible OBJID (which is unlikely).
    if nskies > 2**targetid_mask.OBJID.nbits:
        log.fatal(
            '{} sky locations requested in brick {}, but OBJID cannot exceed {}'
            .format(nskies, brickname, 2**targetid_mask.OBJID.nbits))
        raise ValueError

    # ADM retrieve the standard sky targets data model.
    dt = skydatamodel.dtype
    # ADM and update it according to how many apertures were requested.
    naps = len(apertures_arcsec)
    apcolindices = np.where(['APFLUX' in colname for colname in dt.names])[0]
    desc = dt.descr
    for i in apcolindices:
        desc[i] += (naps, )

    # ADM set up a rec array to hold all of the output information.
    skies = np.zeros(nskies, dtype=desc)

    # ADM populate the output recarray with the RA/Dec of the sky locations.
    skies["RA"], skies["DEC"] = skytable.ra, skytable.dec

    # ADM create an array of target bits with the SKY information set.
    desi_target = np.zeros(nskies, dtype='>i8')
    desi_target |= desi_mask.SKY

    # ADM Find where the fluxes are potentially bad. First check if locations
    # ADM have infinite errors (zero ivars) or zero fluxes in BOTH of g and r
    # ADM (these are typically outside the imaging footprint, in CCD gaps, etc.).
    # ADM checking on z, too, is probably overkill, e.g.:
    # ADM https://github.com/desihub/desitarget/issues/348
    # ADM Remember that we need to test per-band as not all bands may have
    # ADM been requested as an input...
    bstracker = np.ones((nskies, naps), dtype=bool)
    if hasattr(skytable, 'apflux_g'):
        bstracker &= (skytable.apflux_g == 0) | (skytable.apflux_ivar_g == 0)
    if hasattr(skytable, 'apflux_r'):
        bstracker &= (skytable.apflux_r == 0) | (skytable.apflux_ivar_r == 0)

    # ADM as BLOBDIST doesn't depend on the aperture, collapse across apertures.
    bstracker = np.any(bstracker, axis=1)

    # ADM ...now check for BADSKY locations that are in a blob.
    if hasattr(skytable, 'blobdist'):
        bstracker |= (skytable.blobdist == 0.)

    # ADM set any bad skies to BADSKY.
    desi_target[bstracker] = desi_mask.BAD_SKY

    # ADM add the aperture flux measurements.
    if naps == 1:
        if hasattr(skytable, 'apflux_g'):
            skies["APFLUX_G"] = np.hstack(skytable.apflux_g)
            skies["APFLUX_IVAR_G"] = np.hstack(skytable.apflux_ivar_g)
        if hasattr(skytable, 'apflux_r'):
            skies["APFLUX_R"] = np.hstack(skytable.apflux_r)
            skies["APFLUX_IVAR_R"] = np.hstack(skytable.apflux_ivar_r)
        if hasattr(skytable, 'apflux_z'):
            skies["APFLUX_Z"] = np.hstack(skytable.apflux_z)
            skies["APFLUX_IVAR_Z"] = np.hstack(skytable.apflux_ivar_z)
    else:
        if hasattr(skytable, 'apflux_g'):
            skies["APFLUX_G"] = skytable.apflux_g
            skies["APFLUX_IVAR_G"] = skytable.apflux_ivar_g
        if hasattr(skytable, 'apflux_r'):
            skies["APFLUX_R"] = skytable.apflux_r
            skies["APFLUX_IVAR_R"] = skytable.apflux_ivar_r
        if hasattr(skytable, 'apflux_z'):
            skies["APFLUX_Z"] = skytable.apflux_z
            skies["APFLUX_IVAR_Z"] = skytable.apflux_ivar_z

    # ADM add the brick info and blob distance for the sky targets.
    skies["BRICKID"] = skytable.brickid
    skies["BRICKNAME"] = skytable.brickname
    skies["BLOBDIST"] = skytable.blobdist

    # ADM set the data release from an object in a Tractor file.
    tfn = survey.find_file("tractor", brick=brickname)
    # ADM this file should be guaranteed to exist, except for unit tests.
    if os.path.exists(tfn):
        skies["RELEASE"] = fitsio.read(tfn, rows=0, columns='RELEASE')[0]

    # ADM set the OBJID (just use a sequential number, as setting sky=1
    # ADM when building the TARGETID will make these unique).
    skies["OBJID"] = np.arange(nskies)

    # log.info('Finalizing target bits...t = {:.1f}s'.format(time()-start))
    # ADM add target bit columns to the output array, note that mws_target
    # ADM and bgs_target should be zeros for all sky objects.
    dum = np.zeros_like(desi_target)
    skies = finalize(skies, desi_target, dum, dum, sky=1)

    if write:
        outfile = survey.find_file('skies', brick=brickname)
        log.info('Writing sky information to {}...t = {:.1f}s'.format(
            outfile,
            time() - start))
        skytable.writeto(outfile, header=skytable._header)

    # log.info('Done...t = {:.1f}s'.format(time()-start))

    return skies
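
# A standalone sketch (toy dtype only, not part of the module above) of the
# aperture-column trick used in make_skies_for_a_brick(): appending a shape
# to an entry of dtype.descr turns that column into an array-valued column,
# so each sky location can hold one flux measurement per requested aperture.
import numpy as np

naps = 2                                      # e.g. two aperture radii
dt = np.dtype([('RA', '>f8'), ('APFLUX_G', '>f4'), ('APFLUX_IVAR_G', '>f4')])
desc = dt.descr                               # list of (name, format) tuples
for i in np.where(['APFLUX' in name for name in dt.names])[0]:
    desc[i] += (naps, )                       # -> (name, format, naps)

skies = np.zeros(3, dtype=desc)
print(skies['APFLUX_G'].shape)                # (3, 2): 3 locations x 2 apertures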