Example #1
    def setUp(self):
        self.fileio = "test_spectra.fits"
        self.fileappend = "test_spectra_append.fits"
        self.filebuild = "test_spectra_build.fits"
        self.meta = {
            "KEY1" : "VAL1",
            "KEY2" : "VAL2"
        }
        self.nwave = 100
        self.nspec = 5
        self.ndiag = 3

        fmap = empty_fibermap(self.nspec)
        fmap = add_columns(fmap,
                           ['NIGHT', 'EXPID', 'TILEID'],
                           [np.int32(0), np.int32(0), np.int32(0)],
                           )
        
        for s in range(self.nspec):
            fmap[s]["TARGETID"] = 456 + s
            fmap[s]["FIBER"] = 123 + s
            fmap[s]["NIGHT"] = s
            fmap[s]["EXPID"] = s
        self.fmap1 = encode_table(fmap)

        fmap = empty_fibermap(self.nspec)
        fmap = add_columns(fmap,
                           ['NIGHT', 'EXPID', 'TILEID'],
                           [np.int32(0), np.int32(0), np.int32(0)],
                           )

        for s in range(self.nspec):
            fmap[s]["TARGETID"] = 789 + s
            fmap[s]["FIBER"] = 200 + s
            fmap[s]["NIGHT"] = 1000
            fmap[s]["EXPID"] = s
        self.fmap2 = encode_table(fmap)

        self.bands = ["b", "r", "z"]

        self.wave = {}
        self.flux = {}
        self.ivar = {}
        self.mask = {}
        self.res = {}
        self.extra = {}

        for b in self.bands:
            self.wave[b] = np.arange(self.nwave)
            self.flux[b] = np.repeat(np.arange(self.nspec),
                self.nwave).reshape( (self.nspec, self.nwave) ) + 3.0
            self.ivar[b] = 1.0 / self.flux[b]
            self.mask[b] = np.tile(np.arange(2, dtype=np.uint32),
                (self.nwave * self.nspec) // 2).reshape( (self.nspec, self.nwave) )
            self.res[b] = np.zeros( (self.nspec, self.ndiag, self.nwave),
                dtype=np.float64)
            self.res[b][:,1,:] = 1.0
            self.extra[b] = {}
            self.extra[b]["FOO"] = self.flux[b]
Example #2
def read_frame_as_spectra(filename, night, expid, band, single=False):
    """
    Read a FITS file containing a Frame and return a Spectra.

    A Frame file is very close to a Spectra object (by design), and
    only differs by missing the NIGHT and EXPID in the fibermap, as
    well as containing only one band of data.

    Args:
        filename (str): path to read
        night (int): the night value to use for all rows of the fibermap.
        expid (int): the expid value to use for all rows of the fibermap.
        band (str): the name of this band.
        single (bool): if True, keep spectra as single precision in memory.

    Returns (Spectra):
        The object containing the data read from disk.

    """
    fr = read_frame(filename)
    if fr.fibermap is None:
        raise RuntimeError(
            "reading Frame files into Spectra only supported if a fibermap exists"
        )

    nspec = len(fr.fibermap)

    fmap = np.zeros(shape=(nspec, ), dtype=spectra_columns())
    for s in range(nspec):
        for tp in fr.fibermap.dtype.fields:
            fmap[s][tp] = fr.fibermap[s][tp]

    fmap[:]["NIGHT"] = night
    fmap[:]["EXPID"] = expid

    fmap = encode_table(fmap)

    bands = [band]

    mask = None
    if fr.mask is not None:
        mask = {band: fr.mask}

    res = None
    if fr.resolution_data is not None:
        res = {band: fr.resolution_data}

    extra = None
    if fr.chi2pix is not None:
        extra = {band: {"CHI2PIX": fr.chi2pix}}

    spec = Spectra(bands, {band: fr.wave}, {band: fr.flux}, {band: fr.ivar},
                   mask=mask,
                   resolution_data=res,
                   fibermap=fmap,
                   meta=fr.meta,
                   extra=extra,
                   single=single)

    return spec
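
A possible call pattern for the function above; the file name is hypothetical, night and expid are copied into every fibermap row, and the single band is labelled with the given name:

spec = read_frame_as_spectra("frame-b0-00001234.fits", 20200315, 1234, "b",
                             single=True)
print(spec.bands, spec.flux["b"].shape)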
Example #3
    def add_objects(self, flux, ivar, wave, resolution, object_data, night,
                    expid):
        """Add a list of objects to this brick file from the same night and exposure.

        Args:
            flux(numpy.ndarray): Array of (nobj,nwave) flux values for nobj objects tabulated at nwave wavelengths.
            ivar(numpy.ndarray): Array of (nobj,nwave) inverse-variance values.
            wave(numpy.ndarray): Array of (nwave,) wavelength values in Angstroms. All objects are assumed to use the same wavelength grid.
            resolution(numpy.ndarray): Array of (nobj,nres,nwave) resolution matrix elements.
            object_data(astropy.table.Table): fibermap rows for the objects to add.
            night(str): Date string for the night these objects were observed in the format YYYYMMDD.
            expid(int): Exposure number for these objects.

        Raises:
            RuntimeError: Can only add objects in update mode.
        """
        super(Brick, self).add_objects(flux, ivar, wave, resolution)

        augmented_data = table.Table(object_data)
        augmented_data['NIGHT'] = int(night)
        augmented_data['EXPID'] = expid

        fibermap_hdu = self.hdu_list['FIBERMAP']
        if len(fibermap_hdu.data) > 0:
            orig_data = table.Table(fibermap_hdu.data)
            augmented_data = table.vstack([orig_data, augmented_data])

        #- unicode -> ascii columns
        augmented_data = encode_table(augmented_data)

        updated_hdu = astropy.io.fits.convenience.table_to_hdu(augmented_data)
        updated_hdu.header = fibermap_hdu.header
        self.hdu_list['FIBERMAP'].data = updated_hdu.data
Example #4
def spectra_dtype():
    """
    Return the astropy table dtype after encoding.
    """
    pre = np.zeros(shape=(1, ), dtype=spectra_columns())
    post = encode_table(pre)
    return post.dtype
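
This mirrors the np.zeros(shape=(nspec,), dtype=spectra_columns()) pattern in Example #2, but with the encoded dtype, e.g. to allocate an empty fibermap array directly:

import numpy as np

fmap = np.zeros(shape=(10, ), dtype=spectra_dtype())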
Example #5
def write_fiberflat(outfile,fiberflat,header=None, fibermap=None):
    """Write fiberflat object to outfile

    Args:
        outfile: filepath string or (night, expid, camera) tuple
        fiberflat: FiberFlat object

    Optional:
        header: dict or fits.Header object to use as HDU 0 header
        fibermap: table to store as FIBERMAP HDU

    Returns:
        filepath of file that was written
    """
    log = get_logger()
    outfile = makepath(outfile, 'fiberflat')

    if header is None:
        hdr = fitsheader(fiberflat.header)
    else:
        hdr = fitsheader(header)
    if fiberflat.chi2pdf is not None:
        hdr['chi2pdf'] = float(fiberflat.chi2pdf)

    hdr['EXTNAME'] = 'FIBERFLAT'
    if 'BUNIT' in hdr:
        del hdr['BUNIT']

    add_dependencies(hdr)

    ff = fiberflat   #- shorthand

    hdus = fits.HDUList()
    hdus.append(fits.PrimaryHDU(ff.fiberflat.astype('f4'), header=hdr))
    hdus.append(fits.ImageHDU(ff.ivar.astype('f4'),     name='IVAR'))
    hdus.append(fits.ImageHDU(ff.mask,              name='MASK'))
    hdus.append(fits.ImageHDU(ff.meanspec.astype('f4'), name='MEANSPEC'))
    hdus.append(fits.ImageHDU(ff.wave.astype('f4'),     name='WAVELENGTH'))
    if fibermap is None :
        fibermap=ff.fibermap
    if fibermap is not None:
        fibermap = encode_table(fibermap)  #- unicode -> bytes
        fibermap.meta['EXTNAME'] = 'FIBERMAP'
        hdus.append( fits.convenience.table_to_hdu(fibermap) )
    hdus[0].header['BUNIT'] = ("","adimensional quantity to divide to flatfield a frame")
    hdus["IVAR"].header['BUNIT'] = ("","inverse variance, adimensional")
    hdus["MEANSPEC"].header['BUNIT'] = ("electron/Angstrom")
    hdus["WAVELENGTH"].header['BUNIT'] = 'Angstrom'

    t0 = time.time()
    hdus.writeto(outfile+'.tmp', overwrite=True, checksum=True)
    os.rename(outfile+'.tmp', outfile)
    duration = time.time() - t0
    log.info(iotime.format('write', outfile, duration))

    return outfile
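
The fibermap handling above reduces to a small reusable pattern; a self-contained sketch with a toy table (column names are arbitrary):

import numpy as np
from astropy.io import fits
from astropy.table import Table
from desiutil.io import encode_table

tbl = Table({"FIBER": np.arange(5, dtype=np.int32),
             "OBJTYPE": np.array(["TGT"] * 5)})
tbl = encode_table(tbl)             # unicode -> bytes
tbl.meta["EXTNAME"] = "FIBERMAP"    # table_to_hdu copies meta into the header
hdu = fits.convenience.table_to_hdu(tbl)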
Example #6
def write_fiberflat(outfile, fiberflat, header=None, fibermap=None):
    """Write fiberflat object to outfile

    Args:
        outfile: filepath string or (night, expid, camera) tuple
        fiberflat: FiberFlat object

    Optional:
        header: dict or fits.Header object to use as HDU 0 header
        fibermap: table to store as FIBERMAP HDU

    Returns:
        filepath of file that was written
    """
    outfile = makepath(outfile, 'fiberflat')

    if header is None:
        hdr = fitsheader(fiberflat.header)
    else:
        hdr = fitsheader(header)
    if fiberflat.chi2pdf is not None:
        hdr['chi2pdf'] = float(fiberflat.chi2pdf)

    hdr['EXTNAME'] = 'FIBERFLAT'
    if 'BUNIT' in hdr:
        del hdr['BUNIT']

    add_dependencies(hdr)

    ff = fiberflat  #- shorthand

    hdus = fits.HDUList()
    hdus.append(fits.PrimaryHDU(ff.fiberflat.astype('f4'), header=hdr))
    hdus.append(fits.ImageHDU(ff.ivar.astype('f4'), name='IVAR'))
    hdus.append(fits.ImageHDU(ff.mask, name='MASK'))
    hdus.append(fits.ImageHDU(ff.meanspec.astype('f4'), name='MEANSPEC'))
    hdus.append(fits.ImageHDU(ff.wave.astype('f4'), name='WAVELENGTH'))
    if fibermap is None:
        fibermap = ff.fibermap
    if fibermap is not None:
        fibermap = encode_table(fibermap)  #- unicode -> bytes
        fibermap.meta['EXTNAME'] = 'FIBERMAP'
        hdus.append(fits.convenience.table_to_hdu(fibermap))
    hdus[-1].header['BUNIT'] = 'Angstrom'

    hdus.writeto(outfile + '.tmp', overwrite=True, checksum=True)
    os.rename(outfile + '.tmp', outfile)
    return outfile
Example #7
def write_flux_calibration(outfile, fluxcalib, header=None):
    """Writes  flux calibration.

    Args:
        outfile : output file name
        fluxcalib : FluxCalib object

    Options:
        header : dict-like object of key/value pairs to include in header
    """
    log = get_logger()
    hx = fits.HDUList()

    hdr = fitsheader(header)
    add_dependencies(hdr)

    hdr['EXTNAME'] = 'FLUXCALIB'
    hdr['BUNIT'] = ('10**+17 cm2 count s / erg', 'i.e. (elec/A) / (1e-17 erg/s/cm2/A)')
    hx.append( fits.PrimaryHDU(fluxcalib.calib.astype('f4'), header=hdr) )
    hx.append( fits.ImageHDU(fluxcalib.ivar.astype('f4'), name='IVAR') )
    # hx.append( fits.CompImageHDU(fluxcalib.mask, name='MASK') )
    hx.append( fits.ImageHDU(fluxcalib.mask, name='MASK') )
    hx.append( fits.ImageHDU(fluxcalib.wave.astype('f4'), name='WAVELENGTH') )
    hx[-1].header['BUNIT'] = 'Angstrom'

    if fluxcalib.fibercorr is not None :
        tbl = encode_table(fluxcalib.fibercorr)  #- unicode -> bytes
        tbl.meta['EXTNAME'] = 'FIBERCORR'
        hx.append( fits.convenience.table_to_hdu(tbl) )
        if fluxcalib.fibercorr_comments is not None : # add comments in header
            hdu=hx['FIBERCORR']
            for i in range(1,999):
                key = 'TTYPE'+str(i)
                if key in hdu.header:
                    value = hdu.header[key]
                    if value in fluxcalib.fibercorr_comments.keys() :
                        hdu.header[key] = (value, fluxcalib.fibercorr_comments[value])

    t0 = time.time()
    hx.writeto(outfile+'.tmp', overwrite=True, checksum=True)
    os.rename(outfile+'.tmp', outfile)
    duration = time.time() - t0
    log.info(iotime.format('write', outfile, duration))

    return outfile
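
The TTYPEn loop used above for FIBERCORR (and for SCORES in later examples) is a generic way to attach per-column comments to an existing binary-table HDU; a standalone sketch with an illustrative column name:

from astropy.io import fits
from astropy.table import Table

hdu = fits.table_to_hdu(Table({"PSF_TO_FIBER_FLUX": [1.0, 0.9]}))
comments = {"PSF_TO_FIBER_FLUX": "illustrative per-column comment"}
for i in range(1, 999):
    key = 'TTYPE' + str(i)
    if key not in hdu.header:
        break
    value = hdu.header[key]
    if value in comments:
        hdu.header[key] = (value, comments[value])  # keep the value, add a comment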
Example #8
def write_qframe(outfile, qframe, header=None, fibermap=None, units=None):
    """Write a frame fits file and returns path to file written.

    Args:
        outfile: full path to output file, or tuple (night, expid, channel)
        qframe:  desispec.qproc.QFrame object with wave, flux, ivar...

    Optional:
        header: astropy.io.fits.Header or dict to override frame.header
        fibermap: table to store as FIBERMAP HDU

    Returns:
        full filepath of output file that was written

    Note:
        to create a QFrame object to pass into write_qframe,
        qframe = QFrame(wave, flux, ivar)
    """
    log = get_logger()
    outfile = makepath(outfile, 'qframe')

    if header is not None:
        hdr = fitsheader(header)
    else:
        hdr = fitsheader(qframe.meta)

    add_dependencies(hdr)

    hdus = fits.HDUList()
    x = fits.PrimaryHDU(qframe.flux.astype('f4'), header=hdr)
    x.header['EXTNAME'] = 'FLUX'
    if units is not None:
        units = str(units)
        if 'BUNIT' in hdr and hdr['BUNIT'] != units:
            log.warning('BUNIT {bunit} != units {units}; using {units}'.format(
                bunit=hdr['BUNIT'], units=units))
        x.header['BUNIT'] = units
    hdus.append(x)

    hdus.append(fits.ImageHDU(qframe.ivar.astype('f4'), name='IVAR'))
    if qframe.mask is None:
        qframe.mask = np.zeros(qframe.flux.shape, dtype=np.uint32)
    # hdus.append( fits.CompImageHDU(qframe.mask, name='MASK') )
    hdus.append(fits.ImageHDU(qframe.mask, name='MASK'))

    if qframe.sigma is None:
        qframe.sigma = np.zeros(qframe.flux.shape, dtype=float)
    hdus.append(fits.ImageHDU(qframe.sigma.astype('f4'), name='YSIGMA'))

    hdus.append(fits.ImageHDU(qframe.wave.astype('f8'), name='WAVELENGTH'))
    hdus[-1].header['BUNIT'] = 'Angstrom'
    if fibermap is not None:
        fibermap = encode_table(fibermap)  #- unicode -> bytes
        fibermap.meta['EXTNAME'] = 'FIBERMAP'
        hdus.append(fits.convenience.table_to_hdu(fibermap))
    elif qframe.fibermap is not None:
        fibermap = encode_table(qframe.fibermap)  #- unicode -> bytes
        fibermap.meta['EXTNAME'] = 'FIBERMAP'
        hdus.append(fits.convenience.table_to_hdu(fibermap))
    elif qframe.spectrograph is not None:
        x.header[
            'FIBERMIN'] = 500 * qframe.spectrograph  # Hard-coded (as in desispec.qproc.qframe)
    else:
        log.error(
            "You are likely writing a qframe without sufficient fiber info")
        raise ValueError('no fibermap')

    hdus.writeto(outfile + '.tmp', overwrite=True, checksum=True)
    os.rename(outfile + '.tmp', outfile)

    return outfile
Example #9
File: util.py Project: samikama/desispec
def write_bintable(filename,
                   data,
                   header=None,
                   comments=None,
                   units=None,
                   extname=None,
                   clobber=False):
    """Utility function to write a fits binary table complete with
    comments and units in the FITS header too.  DATA can either be
    dictionary, an Astropy Table, a numpy.recarray or a numpy.ndarray.
    """
    from astropy.table import Table
    from desiutil.io import encode_table
    #- Convert data as needed
    if isinstance(data, (np.recarray, np.ndarray, Table)):
        outdata = encode_table(data, encoding='ascii')
    else:
        outdata = encode_table(_dict2ndarray(data), encoding='ascii')

    # hdu = astropy.io.fits.BinTableHDU(outdata, header=header, name=extname)
    hdu = astropy.io.fits.convenience.table_to_hdu(outdata)
    if extname is not None:
        hdu.header['EXTNAME'] = extname

    if header is not None:
        for key, value in header.items():
            hdu.header[key] = value

    #- Write the data and header
    if clobber:
        astropy.io.fits.writeto(filename,
                                hdu.data,
                                hdu.header,
                                overwrite=True,
                                checksum=True)
    else:
        astropy.io.fits.append(filename, hdu.data, hdu.header, checksum=True)

    #- TODO:
    #- The following could probably be implemented more efficiently by updating
    #- the outdata Table metadata directly before writing it out.
    #- The following was originally implemented when outdata was a numpy array.

    #- Allow comments and units to be None
    if comments is None:
        comments = dict()
    if units is None:
        units = dict()

    #- Reopen the file to add the comments and units
    fx = astropy.io.fits.open(filename, mode='update')
    hdu = fx[extname]
    for i in range(1, 999):
        key = 'TTYPE' + str(i)
        if key not in hdu.header:
            break
        else:
            value = hdu.header[key]
            if value in comments:
                hdu.header[key] = (value, comments[value])
            if value in units:
                hdu.header['TUNIT' + str(i)] = (units[value], value + ' units')

    #- Write updated header and close file
    fx.flush()
    fx.close()
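
A call sketch for this write_bintable (toy file name and column); per the docstring, data may also be a dict, recarray, or plain ndarray:

import numpy as np
from astropy.table import Table

tbl = Table({"WAVE": np.arange(5, dtype=np.float32)})
write_bintable("toy.fits", tbl,
               comments={"WAVE": "wavelength bin center"},
               units={"WAVE": "Angstrom"},
               extname="TOY", clobber=True)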
Example #10
def read_frame_as_spectra(filename,
                          night=None,
                          expid=None,
                          band=None,
                          single=False):
    """
    Read a FITS file containing a Frame and return a Spectra.

    A Frame file is very close to a Spectra object (by design), and
    only differs by missing the NIGHT and EXPID in the fibermap, as
    well as containing only one band of data.

    Args:
        filename (str): path to read

    Options:
        night (int): the night value to use for all rows of the fibermap.
        expid (int): the expid value to use for all rows of the fibermap.
        band (str): the name of this band.
        single (bool): if True, keep spectra as single precision in memory.

    Returns (Spectra):
        The object containing the data read from disk.

    """
    fr = read_frame(filename)
    if fr.fibermap is None:
        raise RuntimeError(
            "reading Frame files into Spectra only supported if a fibermap exists"
        )

    nspec = len(fr.fibermap)

    if band is None:
        band = fr.meta['camera'][0]

    if night is None:
        night = fr.meta['night']

    if expid is None:
        expid = fr.meta['expid']

    fmap = np.asarray(fr.fibermap.copy())
    fmap = add_columns(
        fmap,
        ['NIGHT', 'EXPID', 'TILEID'],
        [np.int32(night),
         np.int32(expid),
         np.int32(fr.meta['TILEID'])],
    )

    fmap = encode_table(fmap)

    bands = [band]

    mask = None
    if fr.mask is not None:
        mask = {band: fr.mask}

    res = None
    if fr.resolution_data is not None:
        res = {band: fr.resolution_data}

    extra = None
    if fr.chi2pix is not None:
        extra = {band: {"CHI2PIX": fr.chi2pix}}

    spec = Spectra(bands, {band: fr.wave}, {band: fr.flux}, {band: fr.ivar},
                   mask=mask,
                   resolution_data=res,
                   fibermap=fmap,
                   meta=fr.meta,
                   extra=extra,
                   single=single,
                   scores=fr.scores)

    return spec
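
In this variant night, expid, and band are optional; when omitted they are filled from the frame header (first character of the camera, plus NIGHT and EXPID), so a bare call is enough (hypothetical path):

spec = read_frame_as_spectra("frame-b0-00001234.fits")
print(spec.fibermap["NIGHT"][0], spec.fibermap["EXPID"][0])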
Example #11
def write_frame(outfile, frame, header=None, fibermap=None, units=None):
    """Write a frame fits file and returns path to file written.

    Args:
        outfile: full path to output file, or tuple (night, expid, channel)
        frame:  desispec.frame.Frame object with wave, flux, ivar...

    Optional:
        header: astropy.io.fits.Header or dict to override frame.header
        fibermap: table to store as FIBERMAP HDU

    Returns:
        full filepath of output file that was written

    Note:
        to create a Frame object to pass into write_frame,
        frame = Frame(wave, flux, ivar, resolution_data)
    """
    log = get_logger()
    outfile = makepath(outfile, 'frame')

    #- Ignore some known and harmless units warnings
    import warnings
    warnings.filterwarnings(
        'ignore', message="'.*nanomaggies.* did not parse as fits unit.*")
    warnings.filterwarnings(
        'ignore', message=r".*'10\*\*6 arcsec.* did not parse as fits unit.*")

    if header is not None:
        hdr = fitsheader(header)
    else:
        hdr = fitsheader(frame.meta)

    add_dependencies(hdr)

    # Vette
    diagnosis = frame.vet()
    if diagnosis != 0:
        raise IOError(
            "Frame did not pass simple vetting test. diagnosis={:d}".format(
                diagnosis))

    hdus = fits.HDUList()
    x = fits.PrimaryHDU(frame.flux.astype('f4'), header=hdr)
    x.header['EXTNAME'] = 'FLUX'
    if units is not None:
        units = str(units)
        if 'BUNIT' in hdr and hdr['BUNIT'] != units:
            log.warning('BUNIT {bunit} != units {units}; using {units}'.format(
                bunit=hdr['BUNIT'], units=units))
        x.header['BUNIT'] = units
    hdus.append(x)

    hdus.append(fits.ImageHDU(frame.ivar.astype('f4'), name='IVAR'))
    # hdus.append( fits.CompImageHDU(frame.mask, name='MASK') )
    hdus.append(fits.ImageHDU(frame.mask, name='MASK'))
    hdus.append(fits.ImageHDU(frame.wave.astype('f8'), name='WAVELENGTH'))
    hdus[-1].header['BUNIT'] = 'Angstrom'
    if frame.resolution_data is not None:
        hdus.append(
            fits.ImageHDU(frame.resolution_data.astype('f4'),
                          name='RESOLUTION'))
    elif frame.wsigma is not None:
        log.debug("Using ysigma from qproc")
        qrimg = fits.ImageHDU(frame.wsigma.astype('f4'), name='YSIGMA')
        qrimg.header["NDIAG"] = frame.ndiag
        hdus.append(qrimg)
    if fibermap is not None:
        fibermap = encode_table(fibermap)  #- unicode -> bytes
        fibermap.meta['EXTNAME'] = 'FIBERMAP'
        hdus.append(fits.convenience.table_to_hdu(fibermap))
    elif frame.fibermap is not None:
        fibermap = encode_table(frame.fibermap)  #- unicode -> bytes
        fibermap.meta['EXTNAME'] = 'FIBERMAP'
        hdus.append(fits.convenience.table_to_hdu(fibermap))
    elif frame.spectrograph is not None:
        x.header[
            'FIBERMIN'] = 500 * frame.spectrograph  # Hard-coded (as in desispec.frame)
    else:
        log.error(
            "You are likely writing a frame without sufficient fiber info")

    if frame.chi2pix is not None:
        hdus.append(fits.ImageHDU(frame.chi2pix.astype('f4'), name='CHI2PIX'))

    if frame.scores is not None:
        scores_tbl = encode_table(frame.scores)  #- unicode -> bytes
        scores_tbl.meta['EXTNAME'] = 'SCORES'
        hdus.append(fits.convenience.table_to_hdu(scores_tbl))
        if frame.scores_comments is not None:  # add comments in header
            hdu = hdus['SCORES']
            for i in range(1, 999):
                key = 'TTYPE' + str(i)
                if key in hdu.header:
                    value = hdu.header[key]
                    if value in frame.scores_comments.keys():
                        hdu.header[key] = (value, frame.scores_comments[value])

    hdus.writeto(outfile + '.tmp', overwrite=True, checksum=True)

    os.rename(outfile + '.tmp', outfile)

    return outfile
Example #12
File: util.py Project: sbailey/desispec
def write_bintable(filename,
                   data,
                   header=None,
                   comments=None,
                   units=None,
                   extname=None,
                   clobber=False,
                   primary_extname='PRIMARY'):
    """Utility function to write a fits binary table complete with
    comments and units in the FITS header too.  DATA can either be
    dictionary, an Astropy Table, a numpy.recarray or a numpy.ndarray.
    """
    from astropy.table import Table
    from desiutil.io import encode_table

    log = get_logger()

    #- Convert data as needed
    if isinstance(data, (np.recarray, np.ndarray, Table)):
        outdata = encode_table(data, encoding='ascii')
    else:
        outdata = encode_table(_dict2ndarray(data), encoding='ascii')

    # hdu = astropy.io.fits.BinTableHDU(outdata, header=header, name=extname)
    hdu = astropy.io.fits.convenience.table_to_hdu(outdata)
    if extname is not None:
        hdu.header['EXTNAME'] = extname
    else:
        log.warning("Table does not have EXTNAME set!")

    if header is not None:
        if isinstance(header, astropy.io.fits.header.Header):
            for key, value in header.items():
                comment = header.comments[key]
                hdu.header[key] = (value, comment)
        else:
            hdu.header.update(header)

    #- Allow comments and units to be None
    if comments is None:
        comments = dict()
    if units is None:
        units = dict()
    #
    # Add comments and units to the *columns* of the table.
    #
    for i in range(1, 999):
        key = 'TTYPE' + str(i)
        if key not in hdu.header:
            break
        else:
            value = hdu.header[key]
            if value in comments:
                hdu.header[key] = (value, comments[value])
            if value in units:
                hdu.header['TUNIT' + str(i)] = (units[value], value + ' units')
    #
    # Add checksum cards.
    #
    hdu.add_checksum()

    #- Write the data and header

    if os.path.isfile(filename):
        if not (extname is None and clobber):
            #
            # Always open update mode with memmap=False, but keep the
            # formal check commented out in case we need it in the future.
            #
            memmap = False
            #
            # Check to see if filesystem supports memory-mapping on update.
            #
            # memmap = _supports_memmap(filename)
            # if not memmap:
            #     log.warning("Filesystem does not support memory-mapping!")
            with astropy.io.fits.open(filename, mode='update',
                                      memmap=memmap) as hdulist:
                if extname is None:
                    #
                    # In DESI, we should *always* be setting the extname, so this
                    # might never be called.
                    #
                    log.debug("Adding new HDU to %s.", filename)
                    hdulist.append(hdu)
                else:
                    if extname in hdulist:
                        if clobber:
                            log.debug(
                                "Replacing HDU with EXTNAME = '%s' in %s.",
                                extname, filename)
                            hdulist[extname] = hdu
                        else:
                            log.warning(
                                "Do not modify %s because EXTNAME = '%s' exists.",
                                filename, extname)
                    else:
                        log.debug("Adding new HDU with EXTNAME = '%s' to %s.",
                                  extname, filename)
                        hdulist.append(hdu)
            return
    #
    # If we reach this point, we're writing a new file.
    #
    if os.path.isfile(filename):
        log.debug("Overwriting %s.", filename)
    else:
        log.debug("Writing new file %s.", filename)
    hdu0 = astropy.io.fits.PrimaryHDU()
    hdu0.header['EXTNAME'] = primary_extname
    hdulist = astropy.io.fits.HDUList([hdu0, hdu])
    hdulist.writeto(filename, overwrite=clobber, checksum=True)
    return
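
This version opens an existing file in update mode and either appends or replaces the named HDU, so the same call works both for creating and for refreshing an extension (toy file name and table):

import numpy as np
from astropy.table import Table

tbl = Table({"X": np.arange(3, dtype=np.int32)})
write_bintable("toy.fits", tbl, extname="TOY", clobber=True)  # new file: PRIMARY + TOY
write_bintable("toy.fits", tbl, extname="TOY", clobber=True)  # existing file: TOY HDU is replaced in place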
Example #13
    def __init__(self,
                 bands=[],
                 wave={},
                 flux={},
                 ivar={},
                 mask=None,
                 resolution_data=None,
                 fibermap=None,
                 meta=None,
                 extra=None,
                 single=False):

        self._bands = bands
        self._single = single
        self._ftype = np.float64

        if single:
            self._ftype = np.float32

        self.meta = None

        if meta is None:
            self.meta = {}

        else:
            self.meta = meta.copy()

        nspec = 0

        # check consistency of input dimensions
        for b in self._bands:
            if wave[b].ndim != 1:
                raise RuntimeError(
                    "wavelength array for band {} should have dim == 1".format(
                        b))
            if flux[b].ndim != 2:
                raise RuntimeError(
                    "flux array for band {} should have dim == 2".format(b))
            if flux[b].shape[1] != wave[b].shape[0]:
                raise RuntimeError(
                    "flux array wavelength dimension for band {} does not match wavelength grid"
                    .format(b))
            if nspec == 0:
                nspec = flux[b].shape[0]
            if fibermap is not None:
                if fibermap.dtype != spectra_dtype():
                    print(fibermap.dtype)
                    print(spectra_dtype())
                    raise RuntimeError(
                        "fibermap data type does not match desispec.spectra.spectra_columns"
                    )
                if len(fibermap) != flux[b].shape[0]:
                    raise RuntimeError(
                        "flux array number of spectra for band {} does not match fibermap"
                        .format(b))
            if ivar[b].shape != flux[b].shape:
                raise RuntimeError(
                    "ivar array dimensions do not match flux for band {}".
                    format(b))
            if mask is not None:
                if mask[b].shape != flux[b].shape:
                    raise RuntimeError(
                        "mask array dimensions do not match flux for band {}".
                        format(b))
                if mask[b].dtype not in (int, np.int64, np.int32, np.uint64,
                                         np.uint32):
                    raise RuntimeError("bad mask type {}".format(mask.dtype))
            if resolution_data is not None:
                if resolution_data[b].ndim != 3:
                    raise RuntimeError(
                        "resolution array for band {} should have dim == 3".
                        format(b))
                if resolution_data[b].shape[0] != flux[b].shape[0]:
                    raise RuntimeError(
                        "resolution array spectrum dimension for band {} does not match flux"
                        .format(b))
                if resolution_data[b].shape[2] != wave[b].shape[0]:
                    raise RuntimeError(
                        "resolution array wavelength dimension for band {} does not match grid"
                        .format(b))
            if extra is not None:
                for ex in extra[b].items():
                    if ex[1].shape != flux[b].shape:
                        raise RuntimeError(
                            "extra arrays must have the same shape as the flux array"
                        )

        # copy data
        if fibermap is not None:
            self.fibermap = fibermap.copy()

        else:
            # create bogus fibermap table.
            fmap = np.zeros(shape=(nspec, ), dtype=spectra_columns())
            if nspec > 0:
                fake = np.arange(nspec, dtype=np.int32)
                fiber = np.mod(fake, 5000).astype(np.int32)
                expid = np.floor_divide(fake, 5000).astype(np.int32)
                fmap[:]["EXPID"] = expid
                fmap[:]["FIBER"] = fiber
            self.fibermap = encode_table(fmap)  #- unicode -> bytes

        self.wave = {}
        self.flux = {}
        self.ivar = {}

        if mask is None:
            self.mask = None

        else:
            self.mask = {}

        if resolution_data is None:
            self.resolution_data = None
            self.R = None

        else:
            self.resolution_data = {}
            self.R = {}

        if extra is None:
            self.extra = None

        else:
            self.extra = {}

        for b in self._bands:
            self.wave[b] = np.copy(wave[b].astype(self._ftype))
            self.flux[b] = np.copy(flux[b].astype(self._ftype))
            self.ivar[b] = np.copy(ivar[b].astype(self._ftype))

            if mask is not None:
                self.mask[b] = np.copy(mask[b])

            if resolution_data is not None:
                self.resolution_data[b] = resolution_data[b].astype(
                    self._ftype)
                self.R[b] = np.array(
                    [Resolution(r) for r in resolution_data[b]])

            if extra is not None:
                self.extra[b] = {}

                for ex in extra[b].items():
                    self.extra[b][ex[0]] = np.copy(ex[1].astype(self._ftype))
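
Assuming this is the Spectra class used in the other examples, a self-consistent toy construction looks like the following (band name and array sizes are arbitrary; with fibermap=None a placeholder fibermap is generated internally):

import numpy as np

nspec, nwave, ndiag = 3, 50, 5
wave = {"b": np.arange(nwave, dtype=np.float64)}
flux = {"b": np.ones((nspec, nwave))}
ivar = {"b": np.ones((nspec, nwave))}
res = {"b": np.zeros((nspec, ndiag, nwave))}
res["b"][:, ndiag // 2, :] = 1.0    # delta-function resolution
sp = Spectra(["b"], wave, flux, ivar, resolution_data=res, single=True)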
Example #14
File: io.py Project: desihub/desispec
def write_qframe(outfile, qframe, header=None, fibermap=None, units=None):
    """Write a frame fits file and returns path to file written.

    Args:
        outfile: full path to output file, or tuple (night, expid, channel)
        qframe:  desispec.qproc.QFrame object with wave, flux, ivar...

    Optional:
        header: astropy.io.fits.Header or dict to override frame.header
        fibermap: table to store as FIBERMAP HDU

    Returns:
        full filepath of output file that was written

    Note:
        to create a QFrame object to pass into write_qframe,
        qframe = QFrame(wave, flux, ivar)
    """
    log = get_logger()
    outfile = makepath(outfile, 'qframe')

    if header is not None:
        hdr = fitsheader(header)
    else:
        hdr = fitsheader(qframe.meta)

    add_dependencies(hdr)

    hdus = fits.HDUList()
    x = fits.PrimaryHDU(qframe.flux.astype('f4'), header=hdr)
    x.header['EXTNAME'] = 'FLUX'
    if units is not None:
        units = str(units)
        if 'BUNIT' in hdr and hdr['BUNIT'] != units:
            log.warning('BUNIT {bunit} != units {units}; using {units}'.format(
                        bunit=hdr['BUNIT'], units=units))
        x.header['BUNIT'] = units
    hdus.append(x)

    hdus.append( fits.ImageHDU(qframe.ivar.astype('f4'), name='IVAR') )
    if qframe.mask is None :
        qframe.mask=np.zeros(qframe.flux.shape,dtype=np.uint32)
    # hdus.append( fits.CompImageHDU(qframe.mask, name='MASK') )
    hdus.append( fits.ImageHDU(qframe.mask, name='MASK') )

    if qframe.sigma is None :
        qframe.sigma=np.zeros(qframe.flux.shape,dtype=float)
    hdus.append( fits.ImageHDU(qframe.sigma.astype('f4'), name='YSIGMA') )

    hdus.append( fits.ImageHDU(qframe.wave.astype('f8'), name='WAVELENGTH') )
    hdus[-1].header['BUNIT'] = 'Angstrom'
    if fibermap is not None:
        fibermap = encode_table(fibermap)  #- unicode -> bytes
        fibermap.meta['EXTNAME'] = 'FIBERMAP'
        hdus.append( fits.convenience.table_to_hdu(fibermap) )
    elif qframe.fibermap is not None:
        fibermap = encode_table(qframe.fibermap)  #- unicode -> bytes
        fibermap.meta['EXTNAME'] = 'FIBERMAP'
        hdus.append( fits.convenience.table_to_hdu(fibermap) )
    elif qframe.spectrograph is not None:
        x.header['FIBERMIN'] = 500*qframe.spectrograph  # Hard-coded (as in desispec.qproc.qframe)
    else:
        log.error("You are likely writing a qframe without sufficient fiber info")

    hdus.writeto(outfile+'.tmp', overwrite=True, checksum=True)
    os.rename(outfile+'.tmp', outfile)

    return outfile
Example #15
def read_spectra(infile, single=False, coadd=None):
    """Read Spectra object from FITS file.

    This reads data written by the write_spectra function.  A new Spectra
    object is instantiated and returned.

    Args:
        infile (str): path to read
        single (bool): if True, keep spectra as single precision in memory.
        coadd (array-like): if set, coadd all spectra from the provided targetids.

    Returns (Spectra):
        The object containing the data read from disk.

    """

    ftype = np.float64
    if single:
        ftype = np.float32

    infile = os.path.abspath(infile)
    if not os.path.isfile(infile):
        raise FileNotFoundError("{} is not a file!".format(infile))

    # initialize data objects
    bands = []
    fmap = None
    wave = None
    flux = None
    ivar = None
    mask = None
    res = None
    extra = None
    scores = None

    with fits.open(infile, mode="readonly") as hdulist:
        nhdu = len(hdulist)

        # load the metadata.
        meta = hdulist[0].header

        # For efficiency, go through the HDUs in disk-order.  Use the
        # extension name to determine where to put the data.  We don't
        # explicitly copy the data, since that will be done when constructing
        # the Spectra object.

        for h in range(1, nhdu):
            name = hdulist[h].header["EXTNAME"]
            if name == "FIBERMAP":
                fmap = encode_table(Table(hdulist[h].data, copy=True).as_array())
            elif name == "SCORES":
                scores = encode_table(Table(hdulist[h].data, copy=True).as_array())
            else:
                # Find the band based on the name
                mat = re.match(r"(.*)_(.*)", name)
                if mat is None:
                    raise RuntimeError("FITS extension name {} does not contain the band".format(name))
                band = mat.group(1).lower()
                type = mat.group(2)
                if band not in bands:
                    bands.append(band)
                if type == "WAVELENGTH":
                    if wave is None:
                        wave = {}
                    wave[band] = native_endian(hdulist[h].data.astype(ftype))
                elif type == "FLUX":
                    if flux is None:
                        flux = {}
                    flux[band] = native_endian(hdulist[h].data.astype(ftype))
                elif type == "IVAR":
                    if ivar is None:
                        ivar = {}
                    ivar[band] = native_endian(hdulist[h].data.astype(ftype))
                elif type == "MASK":
                    if mask is None:
                        mask = {}
                    mask[band] = native_endian(hdulist[h].data.astype(np.uint32))
                elif type == "RESOLUTION":
                    if res is None:
                        res = {}
                    res[band] = native_endian(hdulist[h].data.astype(ftype))
                else:
                    # this must be an "extra" HDU
                    if extra is None:
                        extra = {}
                    if band not in extra:
                        extra[band] = {}
                    extra[band][type] = native_endian(hdulist[h].data.astype(ftype))

    if coadd is not None:
        uniq, indices = np.unique(coadd, return_index=True)
        targetids = uniq[indices.argsort()]
        ntargets = len(targetids)
        cwave = dict()
        cflux = dict()
        civar = dict()
        crdat = dict()
        cmask = dict()
        for channel in bands:
            cwave[channel] = wave[channel].copy()
            nwave = len(cwave[channel])
            cflux[channel] = np.zeros((ntargets, nwave))
            civar[channel] = np.zeros((ntargets, nwave))
            ndiag = res[channel].shape[1]
            crdat[channel] = np.zeros((ntargets, ndiag, nwave))
            cmask[channel] = np.zeros((ntargets, nwave), dtype=mask[channel].dtype)
        #- Loop over targets, coadding all spectra for each target
        fibermap = Table(dtype=fmap.dtype)
        for i, targetid in enumerate(targetids):
            ii = np.where(fmap['TARGETID'] == targetid)[0]
            fibermap.add_row(fmap[ii[0]])
            for channel in bands:
                if len(ii) > 1:
                    outwave, outflux, outivar, outrdat = _coadd(
                        wave[channel],
                        flux[channel][ii],
                        ivar[channel][ii],
                        res[channel][ii]
                        )
                    outmask = mask[channel][ii[0]]
                    for j in range(1, len(ii)):
                        outmask |= mask[channel][ii[j]]
                else:
                    outwave, outflux, outivar, outrdat = (
                        wave[channel],
                        flux[channel][ii[0]],
                        ivar[channel][ii[0]],
                        res[channel][ii[0]]
                        )
                    outmask = mask[channel][ii[0]]

                cflux[channel][i] = outflux
                civar[channel][i] = outivar
                crdat[channel][i] = outrdat
                cmask[channel][i] = outmask

        return Spectra(bands, cwave, cflux, civar, mask=cmask, resolution_data=crdat,
                       fibermap=fibermap, meta=meta, extra=extra, single=single,
                       scores=scores)

    # Construct the Spectra object from the data.  If there are any
    # inconsistencies in the sizes of the arrays read from the file,
    # they will be caught by the constructor.

    return Spectra(bands, wave, flux, ivar, mask=mask, resolution_data=res,
                   fibermap=fmap, meta=meta, extra=extra, single=single,
                   scores=scores)
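
Read-back sketch (hypothetical path); the coadd option groups rows by the supplied target IDs and assumes the file contains MASK and RESOLUTION HDUs:

sp = read_spectra("spectra-64-1234.fits", single=True)
print(sp.bands, sp.flux[sp.bands[0]].shape)

# coadd every spectrum of each target into a single row per TARGETID
co = read_spectra("spectra-64-1234.fits", coadd=sp.fibermap["TARGETID"])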
Example #16
    def update(self, other):
        """
        Overwrite or append new data.

        Given another Spectra object, compare the fibermap information with
        the existing one.  For spectra that already exist, overwrite existing
        data with the new values.  For spectra that do not exist, append that
        data to the end of the spectral data.

        Args:
            other (Spectra): the new data to add.

        Returns:
            nothing (object updated in place).

        """
        if not isinstance(other, Spectra):
            raise ValueError("New data has incorrect type!")

        # Does the other Spectra object have any data?

        if other.num_spectra() == 0:
            return

        # Do we have new bands to add?

        newbands = []
        for b in other.bands:
            if b not in self.bands:
                newbands.append(b)
            else:
                if not np.allclose(self.wave[b], other.wave[b]):
                    raise ValueError("Band {} has an incompatible wavelength grid.".format(b))

        bands = list(self.bands)
        bands.extend(newbands)

        # Are we adding mask data in this update?

        add_mask = False
        if other.mask is None:
            if self.mask is not None:
                raise ValueError("Existing spectra has a mask, cannot "
                                 "update it to a spectra with no mask.")
        else:
            if self.mask is None:
                add_mask = True

        # Are we adding resolution data in this update?

        ndiag = {}

        add_res = False
        if other.resolution_data is None:
            if self.resolution_data is not None:
                raise ValueError("Existing spectra has resolution data, cannot "
                                 "update it to a spectra with none.")
        else:
            if self.resolution_data is not None:
                for b in self.bands:
                    ndiag[b] = self.resolution_data[b].shape[1]
                for b in other.bands:
                    odiag = other.resolution_data[b].shape[1]
                    if b not in self.bands:
                        ndiag[b] = odiag
                    else:
                        if odiag != ndiag[b]:
                            raise ValueError("Resolution matrices for a"
                                             " given band must have the same dimensions.")
            else:
                add_res = True
                for b in other.bands:
                    ndiag[b] = other.resolution_data[b].shape[1]

        # Are we adding extra data in this update?

        add_extra = False
        if other.extra is None:
            if self.extra is not None:
                raise ValueError("Existing spectra has extra data, cannot "
                                 "update it to a spectra with none.")
        else:
            if self.extra is None:
                add_extra = True

        # Compute which targets / exposures are new

        nother = len(other.fibermap)
        exists = np.zeros(nother, dtype=int)

        indx_original = []

        if self.fibermap is not None:
            for r in range(nother):
                expid = other.fibermap[r]["EXPID"]
                fiber = other.fibermap[r]["FIBER"]
                for i, row in enumerate(self.fibermap):
                    if (expid == row["EXPID"]) and (fiber == row["FIBER"]):
                        indx_original.append(i)
                        exists[r] += 1

        if len(np.where(exists > 1)[0]) > 0:
            raise ValueError("Found duplicate spectra (same EXPID and FIBER) in the fibermap.")

        indx_exists = np.where(exists == 1)[0]
        indx_new = np.where(exists == 0)[0]

        # Make new data arrays of the correct size to hold both the old and
        # new data

        nupdate = len(indx_exists)
        nnew = len(indx_new)

        if self.fibermap is None:
            nold = 0
            newfmap = other.fibermap.copy()
        else:
            nold = len(self.fibermap)
            newfmap = encode_table(np.zeros( (nold + nnew, ),
                                   dtype=self.fibermap.dtype))

        if self.scores is None:
            if other.scores is None:
                newscores = None
            else:
                newscores = other.scores.copy()
        else:
            newscores = encode_table(np.zeros( (nold + nnew, ),
                                     dtype=self.scores.dtype))

        newwave = {}
        newflux = {}
        newivar = {}

        newmask = None
        if add_mask or self.mask is not None:
            newmask = {}

        newres = None
        newR = None
        if add_res or self.resolution_data is not None:
            newres = {}
            newR = {}

        newextra = None
        if add_extra or self.extra is not None:
            newextra = {}

        for b in bands:
            nwave = None
            if b in self.bands:
                nwave = self.wave[b].shape[0]
                newwave[b] = self.wave[b]
            else:
                nwave = other.wave[b].shape[0]
                newwave[b] = other.wave[b].astype(self._ftype)
            newflux[b] = np.zeros( (nold + nnew, nwave), dtype=self._ftype)
            newivar[b] = np.zeros( (nold + nnew, nwave), dtype=self._ftype)
            if newmask is not None:
                newmask[b] = np.zeros( (nold + nnew, nwave), dtype=np.uint32)
                newmask[b][:,:] = specmask["NODATA"]
            if newres is not None:
                newres[b] = np.zeros( (nold + nnew, ndiag[b], nwave), dtype=self._ftype)
            if newextra is not None:
                newextra[b] = {}

        # Copy the old data

        if nold > 0:
            # We have some data (i.e. we are not starting with an empty Spectra)
            newfmap[:nold] = self.fibermap
            if newscores is not None:
                newscores[:nold] = self.scores

            for b in self.bands:
                newflux[b][:nold,:] = self.flux[b]
                newivar[b][:nold,:] = self.ivar[b]
                if self.mask is not None:
                    newmask[b][:nold,:] = self.mask[b]
                elif add_mask:
                    newmask[b][:nold,:] = 0
                if self.resolution_data is not None:
                    newres[b][:nold,:,:] = self.resolution_data[b]
                if self.extra is not None:
                    for ex in self.extra[b].items():
                        newextra[b][ex[0]] = np.zeros( newflux[b].shape,
                            dtype=self._ftype)
                        newextra[b][ex[0]][:nold,:] = ex[1]

        # Update existing spectra

        for i, s in enumerate(indx_exists):
            row = indx_original[i]
            for b in other.bands:
                newflux[b][row,:] = other.flux[b][s,:].astype(self._ftype)
                newivar[b][row,:] = other.ivar[b][s,:].astype(self._ftype)
                if other.mask is not None:
                    newmask[b][row,:] = other.mask[b][s,:]
                else:
                    newmask[b][row,:] = 0
                if other.resolution_data is not None:
                    newres[b][row,:,:] = other.resolution_data[b][s,:,:].astype(self._ftype)
                if other.extra is not None:
                    for ex in other.extra[b].items():
                        if ex[0] not in newextra[b]:
                            newextra[b][ex[0]] = np.zeros(newflux[b].shape,
                                dtype=self._ftype)
                        newextra[b][ex[0]][row,:] = ex[1][s,:].astype(self._ftype)

        # Append new spectra

        if nnew > 0:
            newfmap[nold:] = other.fibermap[indx_new]
            if newscores is not None:
                newscores[nold:] = other.scores[indx_new]

            for b in other.bands:
                newflux[b][nold:,:] = other.flux[b][indx_new].astype(self._ftype)
                newivar[b][nold:,:] = other.ivar[b][indx_new].astype(self._ftype)
                if other.mask is not None:
                    newmask[b][nold:,:] = other.mask[b][indx_new]
                else:
                    newmask[b][nold:,:] = 0
                if other.resolution_data is not None:
                    newres[b][nold:,:,:] = other.resolution_data[b][indx_new].astype(self._ftype)
                if other.extra is not None:
                    for ex in other.extra[b].items():
                        if ex[0] not in newextra[b]:
                            newextra[b][ex[0]] = np.zeros(newflux[b].shape,
                                dtype=self._ftype)
                        newextra[b][ex[0]][nold:,:] = ex[1][indx_new].astype(self._ftype)

        # Swap data into place

        self._bands = bands
        self.fibermap = newfmap
        self.scores = newscores
        self._reset_properties()
        for i, b in enumerate(self._bands):
            band_meta = dict()
            if newmask is None:
                band_meta['mask'] = None
                bool_mask = None
            else:
                band_meta['mask'] = newmask[b]
                bool_mask = band_meta['mask'] != 0
            if newres is None:
                band_meta['resolution_data'] = None
                band_meta['R'] = None
            else:
                band_meta['resolution_data'] = newres[b]
                band_meta['R'] = np.array([Resolution(r) for r in newres[b]])
            if newextra is None:
                band_meta['extra'] = None
            else:
                band_meta['extra'] = dict()
                for k, v in newextra[b].items():
                    band_meta['extra'][k] = v
            s = Spectrum1D(spectral_axis=newwave[b]*u.Angstrom,
                           flux=newflux[b]*u.Unit('10**-17 erg/(s cm2 Angstrom)'),
                           uncertainty=InverseVariance(newivar[b]),
                           mask=bool_mask,
                           meta=band_meta)
            try:
                self[i] = s
            except IndexError:
                self.append(s)
        return
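
A call sketch per the docstring; sp_a and sp_b stand for two instances of this class, built or read as in the earlier examples:

sp_a.update(sp_b)   # rows with a matching (EXPID, FIBER) are overwritten, the rest are appended
print(len(sp_a.fibermap))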
Example #17
File: frame.py Project: desihub/desispec
def write_frame(outfile, frame, header=None, fibermap=None, units=None):
    """Write a frame fits file and returns path to file written.

    Args:
        outfile: full path to output file, or tuple (night, expid, channel)
        frame:  desispec.frame.Frame object with wave, flux, ivar...

    Optional:
        header: astropy.io.fits.Header or dict to override frame.header
        fibermap: table to store as FIBERMAP HDU

    Returns:
        full filepath of output file that was written

    Note:
        to create a Frame object to pass into write_frame,
        frame = Frame(wave, flux, ivar, resolution_data)
    """
    log = get_logger()
    outfile = makepath(outfile, 'frame')

    #- Ignore some known and harmless units warnings
    import warnings
    warnings.filterwarnings('ignore', message="'.*nanomaggies.* did not parse as fits unit.*")
    warnings.filterwarnings('ignore', message=".*'10\*\*6 arcsec.* did not parse as fits unit.*")

    if header is not None:
        hdr = fitsheader(header)
    else:
        hdr = fitsheader(frame.meta)

    add_dependencies(hdr)

    # Vette
    diagnosis = frame.vet()
    if diagnosis != 0:
        raise IOError("Frame did not pass simple vetting test. diagnosis={:d}".format(diagnosis))

    hdus = fits.HDUList()
    x = fits.PrimaryHDU(frame.flux.astype('f4'), header=hdr)
    x.header['EXTNAME'] = 'FLUX'
    if units is not None:
        units = str(units)
        if 'BUNIT' in hdr and hdr['BUNIT'] != units:
            log.warning('BUNIT {bunit} != units {units}; using {units}'.format(
                        bunit=hdr['BUNIT'], units=units))
        x.header['BUNIT'] = units
    hdus.append(x)

    hdus.append( fits.ImageHDU(frame.ivar.astype('f4'), name='IVAR') )
    # hdus.append( fits.CompImageHDU(frame.mask, name='MASK') )
    hdus.append( fits.ImageHDU(frame.mask, name='MASK') )
    hdus.append( fits.ImageHDU(frame.wave.astype('f8'), name='WAVELENGTH') )
    hdus[-1].header['BUNIT'] = 'Angstrom'
    if frame.resolution_data is not None:
        hdus.append( fits.ImageHDU(frame.resolution_data.astype('f4'), name='RESOLUTION' ) )
    elif frame.wsigma is not None:
        log.debug("Using ysigma from qproc")
        qrimg=fits.ImageHDU(frame.wsigma.astype('f4'), name='YSIGMA' )
        qrimg.header["NDIAG"] =frame.ndiag
        hdus.append(qrimg)
    if fibermap is not None:
        fibermap = encode_table(fibermap)  #- unicode -> bytes
        fibermap.meta['EXTNAME'] = 'FIBERMAP'
        hdus.append( fits.convenience.table_to_hdu(fibermap) )
    elif frame.fibermap is not None:
        fibermap = encode_table(frame.fibermap)  #- unicode -> bytes
        fibermap.meta['EXTNAME'] = 'FIBERMAP'
        hdus.append( fits.convenience.table_to_hdu(fibermap) )
    elif frame.spectrograph is not None:
        x.header['FIBERMIN'] = 500*frame.spectrograph  # Hard-coded (as in desispec.frame)
    else:
        log.error("You are likely writing a frame without sufficient fiber info")

    if frame.chi2pix is not None:
        hdus.append( fits.ImageHDU(frame.chi2pix.astype('f4'), name='CHI2PIX' ) )

    if frame.scores is not None :
        scores_tbl = encode_table(frame.scores)  #- unicode -> bytes
        scores_tbl.meta['EXTNAME'] = 'SCORES'
        hdus.append( fits.convenience.table_to_hdu(scores_tbl) )
        if frame.scores_comments is not None : # add comments in header
            hdu=hdus['SCORES']
            for i in range(1,999):
                key = 'TTYPE'+str(i)
                if key in hdu.header:
                    value = hdu.header[key]
                    if value in frame.scores_comments.keys() :
                        hdu.header[key] = (value, frame.scores_comments[value])

    hdus.writeto(outfile+'.tmp', overwrite=True, checksum=True)

    os.rename(outfile+'.tmp', outfile)

    return outfile
Example #18
    def __init__(self, path, mode='readonly', header=None):
        from .util import fitsheader
        from .fibermap import fibermap_columns, fibermap_comments
        if mode not in ('readonly', 'update'):
            raise RuntimeError('Invalid mode %r' % mode)
        self.path = path
        self.mode = mode
        # Create a new file if necessary.
        if self.mode == 'update' and not os.path.exists(self.path):
            # BRICKNAM must be in header if creating the file for the first time
            if header is None or 'BRICKNAM' not in header:
                raise ValueError(
                    'header must have BRICKNAM when creating new brick file')

            self.brickname = header['BRICKNAM']
            if 'CHANNEL' in header:
                self.channel = header['CHANNEL']
            else:
                self.channel = 'brz'  #- could be any spectrograph channel

            # Create the parent directory, if necessary.
            head, tail = os.path.split(os.path.abspath(self.path))
            if not os.path.exists(head):
                os.makedirs(head)
            # Create empty HDUs. It would be good to refactor io.frame to avoid any duplication here.
            hdr = fitsheader(header)
            add_dependencies(hdr)
            hdr['EXTNAME'] = ('FLUX', '1e-17 erg/(s cm2 Angstrom)')
            hdr['BUNIT'] = '1e-17 erg/(s cm2 Angstrom)'
            hdu0 = astropy.io.fits.PrimaryHDU(header=hdr)
            hdu1 = astropy.io.fits.ImageHDU(name='IVAR')
            hdu2 = astropy.io.fits.ImageHDU(name='WAVELENGTH')
            hdu2.header['BUNIT'] = 'Angstrom'
            hdu3 = astropy.io.fits.ImageHDU(name='RESOLUTION')
            # Create an HDU4 using the columns from fibermap with a few extras added.
            columns = fibermap_columns[:]
            columns.extend([
                ('NIGHT', 'i4'),
                ('EXPID', 'i4'),
                ('INDEX', 'i4'),
            ])
            data = np.empty(shape=(0, ), dtype=columns)
            data = encode_table(data)  #- unicode -> bytes
            data.meta['EXTNAME'] = 'FIBERMAP'
            for key, value in header.items():
                data.meta[key] = value
            hdu4 = astropy.io.fits.convenience.table_to_hdu(data)

            # Add comments for fibermap columns.
            num_fibermap_columns = len(fibermap_comments)
            for i in range(1, 1 + num_fibermap_columns):
                key = 'TTYPE%d' % i
                name = hdu4.header[key]
                comment = fibermap_comments[name]
                hdu4.header[key] = (name, comment)
            # Add comments for our additional columns.
            hdu4.header['TTYPE%d' % (1 + num_fibermap_columns)] = (
                'NIGHT', 'Night of exposure YYYYMMDD')
            hdu4.header['TTYPE%d' %
                        (2 + num_fibermap_columns)] = ('EXPID', 'Exposure ID')
            hdu4.header['TTYPE%d' % (3 + num_fibermap_columns)] = (
                'INDEX', 'Index of this object in other HDUs')
            self.hdu_list = astropy.io.fits.HDUList(
                [hdu0, hdu1, hdu2, hdu3, hdu4])
        else:
            self.hdu_list = astropy.io.fits.open(path, mode=self.mode)
            try:
                self.brickname = self.hdu_list[0].header['BRICKNAM']
                self.channel = self.hdu_list[0].header['CHANNEL']
            except KeyError:
                self.channel, self.brickname = _parse_brick_filename(path)
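
# --- Illustrative sketch (not part of the example above) ---
# The constructor above attaches a human-readable comment to each FIBERMAP
# column by rewriting the TTYPEn header cards.  A standalone version of that
# pattern, with hypothetical columns and comments:
from astropy.io import fits
from astropy.table import Table

tbl = Table({'NIGHT': [20200101, 20200102], 'EXPID': [1, 2]})
hdu = fits.convenience.table_to_hdu(tbl)
column_comments = {'NIGHT': 'Night of exposure YYYYMMDD', 'EXPID': 'Exposure ID'}
for i, name in enumerate(tbl.colnames, start=1):
    key = 'TTYPE%d' % i
    assert hdu.header[key] == name
    hdu.header[key] = (name, column_comments[name])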
Example #19
def write_bintable(filename, data, header=None, comments=None, units=None,
                   extname=None, clobber=False):
    """Utility function to write a fits binary table complete with
    comments and units in the FITS header too.  DATA can either be
    dictionary, an Astropy Table, a numpy.recarray or a numpy.ndarray.
    """
    from astropy.table import Table
    from desiutil.io import encode_table

    log = get_logger()
    
    #- Convert data as needed
    if isinstance(data, (np.recarray, np.ndarray, Table)):
        outdata = encode_table(data, encoding='ascii')
    else:
        outdata = encode_table(_dict2ndarray(data), encoding='ascii')

    # hdu = astropy.io.fits.BinTableHDU(outdata, header=header, name=extname)
    hdu = astropy.io.fits.convenience.table_to_hdu(outdata)
    if extname is not None:
        hdu.header['EXTNAME'] = extname

    if header is not None:
        if isinstance(header, astropy.io.fits.header.Header):
            for key, value in header.items():
                comment = header.comments[key]
                hdu.header[key] = (value, comment)
        else:
            hdu.header.update(header)

    #- Write the data and header

    
    if os.path.isfile(filename) :
        if extname is None :
            if clobber :
                #- overwrite file
                log.debug("overwriting {}".format(filename))
                astropy.io.fits.writeto(filename, hdu.data, hdu.header, overwrite=True, checksum=True)
            else :
                #- append file
                log.debug("adding new HDU to {}".format(filename))
                astropy.io.fits.append(filename, hdu.data, hdu.header, checksum=True)
        else :
            #- we need to open the file and only overwrite the extension
            fx = astropy.io.fits.open(filename,mode='update')
            if extname in fx :
                if not clobber :
                    log.warning("do not modify {} because extname {} exists".format(filename, extname))
                    fx.close()
                    return
                #- need replace here
                log.debug("replacing HDU {} in {}".format(extname,filename))
                fx[extname]=hdu
            else :
                log.debug("adding HDU {} to {}".format(extname,filename))
                fx.append(hdu)
            #- Write updates and close file
            fx.flush()
            fx.close()
    else :
        log.debug("writing new file {}".format(filename))
        astropy.io.fits.writeto(filename, hdu.data, hdu.header, checksum=True)
    

    #- TODO:
    #- The following could probably be implemented for efficiently by updating
    #- the outdata Table metadata directly before writing it out.
    #- The following was originally implemented when outdata was a numpy array.

    #- Allow comments and units to be None
    if comments is None:
        comments = dict()
    if units is None:
        units = dict()

    #- Reopen the file to add the comments and units
    fx = astropy.io.fits.open(filename, mode='update')
    if extname is not None:
        hdu = fx[extname]
    else:
        hdu = fx[-1]  #- the HDU that was just written or appended
    for i in range(1,999):
        key = 'TTYPE'+str(i)
        if key not in hdu.header:
            break
        else:
            value = hdu.header[key]
            if value in comments:
                hdu.header[key] = (value, comments[value])
            if value in units:
                hdu.header['TUNIT'+str(i)] = (units[value], value+' units')

    #- Write updated header and close file
    fx.flush()
    fx.close()
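
# --- Illustrative usage sketch (not part of the example above) ---
# write_bintable accepts a plain dict of columns; comments and units are keyed
# by column name.  Assumes write_bintable above is in scope; the file name and
# column names are hypothetical.
import numpy as np

catalog = {'TARGETID': np.arange(5, dtype=np.int64),
           'FLUX_R': np.ones(5, dtype=np.float32)}
write_bintable('example_catalog.fits', catalog,
               comments={'TARGETID': 'Unique target ID',
                         'FLUX_R': 'r-band flux'},
               units={'FLUX_R': 'nanomaggies'},
               extname='CATALOG', clobber=True)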
Example #20
    def setUp(self):
        #- catch specific warnings so that we can find and fix
        # warnings.filterwarnings("error", ".*did not parse as fits unit.*")

        #- Test data and files to work with
        self.fileio = "test_spectra.fits"
        self.fileappend = "test_spectra_append.fits"
        self.filebuild = "test_spectra_build.fits"
        self.meta = {"KEY1": "VAL1", "KEY2": "VAL2"}
        self.nwave = 100
        self.nspec = 5
        self.ndiag = 3

        fmap = empty_fibermap(self.nspec)
        fmap = add_columns(
            fmap,
            ['NIGHT', 'EXPID', 'TILEID'],
            [np.int32(0), np.int32(0), np.int32(0)],
        )

        for s in range(self.nspec):
            fmap[s]["TARGETID"] = 456 + s
            fmap[s]["FIBER"] = 123 + s
            fmap[s]["NIGHT"] = s
            fmap[s]["EXPID"] = s
        self.fmap1 = encode_table(fmap)

        fmap = empty_fibermap(self.nspec)
        fmap = add_columns(
            fmap,
            ['NIGHT', 'EXPID', 'TILEID'],
            [np.int32(0), np.int32(0), np.int32(0)],
        )

        for s in range(self.nspec):
            fmap[s]["TARGETID"] = 789 + s
            fmap[s]["FIBER"] = 200 + s
            fmap[s]["NIGHT"] = 1000
            fmap[s]["EXPID"] = 1000 + s
        self.fmap2 = encode_table(fmap)

        for s in range(self.nspec):
            fmap[s]["TARGETID"] = 1234 + s
            fmap[s]["FIBER"] = 300 + s
            fmap[s]["NIGHT"] = 2000
            fmap[s]["EXPID"] = 2000 + s
        self.fmap3 = encode_table(fmap)

        self.bands = ["b", "r", "z"]

        self.wave = {}
        self.flux = {}
        self.ivar = {}
        self.mask = {}
        self.res = {}
        self.extra = {}

        for s in range(self.nspec):
            for b in self.bands:
                self.wave[b] = np.arange(self.nwave)
                self.flux[b] = np.repeat(np.arange(self.nspec),
                                         self.nwave).reshape(
                                             (self.nspec, self.nwave)) + 3.0
                self.ivar[b] = 1.0 / self.flux[b]
                self.mask[b] = np.tile(np.arange(2, dtype=np.uint32),
                                       (self.nwave * self.nspec) // 2).reshape(
                                           (self.nspec, self.nwave))
                self.res[b] = np.zeros((self.nspec, self.ndiag, self.nwave),
                                       dtype=np.float64)
                self.res[b][:, 1, :] = 1.0
                self.extra[b] = {}
                self.extra[b]["FOO"] = self.flux[b]

        self.scores = dict(BLAT=np.arange(self.nspec), FOO=np.ones(self.nspec))
        self.extra_catalog = Table()
        self.extra_catalog['A'] = np.arange(self.nspec)
        self.extra_catalog['B'] = np.ones(self.nspec)
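
    # --- Illustrative sketch (not part of the original test class) ---
    # A hypothetical test showing how the arrays built in setUp map onto the
    # Spectra constructor used elsewhere in these examples; it assumes Spectra
    # is importable in this test module.
    def test_build_spectra_sketch(self):
        spec = Spectra(self.bands, self.wave, self.flux, self.ivar,
                       mask=self.mask, resolution_data=self.res,
                       fibermap=self.fmap1, meta=self.meta, extra=self.extra)
        self.assertTrue(spec.num_spectra() > 0)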
Example #21
File: desi.py  Project: tskisner/redrock
    def __init__(self,
                 spectrafiles,
                 coadd=True,
                 targetids=None,
                 first_target=None,
                 n_target=None,
                 comm=None):

        comm_size = 1
        comm_rank = 0
        if comm is not None:
            comm_size = comm.size
            comm_rank = comm.rank

        # check the file list
        if isinstance(spectrafiles, str):
            import glob
            spectrafiles = glob.glob(spectrafiles)

        assert len(spectrafiles) > 0

        self._spectrafiles = spectrafiles

        # This is the mapping between specs to targets for each file

        self._spec_to_target = {}
        self._target_specs = {}
        self._spec_keep = {}
        self._spec_sliced = {}

        # The bands for each file

        self._bands = {}
        self._wave = {}

        # The full list of targets from all files

        self._alltargetids = set()

        # The fibermaps from all files

        self._fmaps = {}

        for sfile in spectrafiles:
            hdus = None
            nhdu = None
            fmap = None
            if comm_rank == 0:
                hdus = fits.open(sfile, memmap=True)
                nhdu = len(hdus)
                fmap = encode_table(
                    Table(hdus["FIBERMAP"].data, copy=True).as_array())

            if comm is not None:
                nhdu = comm.bcast(nhdu, root=0)
                fmap = comm.bcast(fmap, root=0)

            # Now every process has the fibermap and number of HDUs.  Build the
            # mapping between spectral rows and target IDs.

            keep_targetids = targetids
            if targetids is None:
                keep_targetids = fmap["TARGETID"]

            # Select a subset of the target range from each file if desired.

            if first_target is None:
                first_target = 0
            if first_target > len(keep_targetids):
                raise RuntimeError("first_target value \"{}\" is beyond the "
                    "number of selected targets in the file".\
                    format(first_target))

            if n_target is None:
                n_target = len(keep_targetids)
            if first_target + n_target > len(keep_targetids):
                raise RuntimeError(
                    "Requested first_target / n_target range is larger "
                    "than the number of selected targets in the file")

            keep_targetids = keep_targetids[first_target:first_target +
                                            n_target]

            self._alltargetids.update(keep_targetids)

            # This is the spectral row to target mapping using the original
            # global indices (before slicing).

            self._spec_to_target[sfile] = [ x if y in keep_targetids else -1 \
                for x, y in enumerate(fmap["TARGETID"]) ]

            # The reduced set of spectral rows.

            self._spec_keep[sfile] = [ x for x in self._spec_to_target[sfile] \
                if x >= 0 ]

            # The mapping between original spectral indices and the sliced ones

            self._spec_sliced[sfile] = { x : y for y, x in \
                enumerate(self._spec_keep[sfile]) }

            # Slice the fibermap

            self._fmaps[sfile] = fmap[self._spec_keep[sfile]]

            # For each target, store the sliced row index of all spectra,
            # so that we can do a fast lookup later.

            self._target_specs[sfile] = {}
            for id in keep_targetids:
                self._target_specs[sfile][id] = [ x for x, y in \
                    enumerate(self._fmaps[sfile]["TARGETID"]) if y == id ]

            # We need some more metadata for each file: specifically, the
            # bands that are used and their wavelength grids.
            # That information will allow us to pre-allocate our local target
            # list and then fill that with one pass through all HDUs in the
            # files.

            self._bands[sfile] = []
            self._wave[sfile] = dict()

            if comm_rank == 0:
                for h in range(nhdu):
                    name = None
                    if "EXTNAME" not in hdus[h].header:
                        continue
                    name = hdus[h].header["EXTNAME"]
                    mat = re.match(r"(.*)_(.*)", name)
                    if mat is None:
                        continue
                    band = mat.group(1).lower()
                    if band not in self._bands[sfile]:
                        self._bands[sfile].append(band)
                    htype = mat.group(2)

                    if htype == "WAVELENGTH":
                        self._wave[sfile][band] = \
                            hdus[h].data.astype(np.float64).copy()

            if comm is not None:
                self._bands[sfile] = comm.bcast(self._bands[sfile], root=0)
                self._wave[sfile] = comm.bcast(self._wave[sfile], root=0)

            if comm_rank == 0:
                hdus.close()

        self._keep_targets = list(sorted(self._alltargetids))

        # Now we have the metadata for all targets in all files.  Distribute
        # the targets among processes, weighted by the amount of work to do
        # for each target.  This weight is "1" if we are going to use coadds,
        # or the number of spectra if we are using all the data.

        tweights = None
        if not coadd:
            tweights = dict()
            for t in self._keep_targets:
                tweights[t] = 0
                for sfile in spectrafiles:
                    if t in self._target_specs[sfile]:
                        tweights[t] += len(self._target_specs[sfile][t])

        self._proc_targets = distribute_work(comm_size,
                                             self._keep_targets,
                                             weights=tweights)

        self._my_targets = self._proc_targets[comm_rank]

        # Reverse mapping: target ID to index in our list
        self._my_target_indx = {y: x for x, y in enumerate(self._my_targets)}

        # Now every process has its local target IDs assigned.  Pre-create our
        # local target list with empty spectral data (except for wavelengths)

        self._my_data = list()

        for t in self._my_targets:
            speclist = list()
            tileids = set()
            exps = set()
            bname = None
            for sfile in spectrafiles:
                for b in self._bands[sfile]:
                    if t in self._target_specs[sfile]:
                        nspec = len(self._target_specs[sfile][t])
                        for s in range(nspec):
                            sindx = self._target_specs[sfile][t][s]
                            frow = self._fmaps[sfile][sindx]
                            if bname is None:
                                bname = frow["BRICKNAME"]
                            exps.add(frow["EXPID"])
                            if "TILEID" in frow.dtype.names:
                                tileids.add(frow["TILEID"])
                            speclist.append(
                                Spectrum(self._wave[sfile][b], None, None,
                                         None, None))
            # Meta dictionary for this target.  Whatever keys we put in here
            # will end up as columns in the final zbest output table.
            tmeta = dict()
            tmeta["NUMEXP"] = len(exps)
            tmeta["NUMEXP_datatype"] = "i4"
            tmeta["NUMTILE"] = len(tileids)
            tmeta["NUMTILE_datatype"] = "i4"
            tmeta["BRICKNAME"] = bname
            tmeta["BRICKNAME_datatype"] = "S8"
            self._my_data.append(Target(t, speclist, coadd=False, meta=tmeta))

        # Iterate over the data and broadcast.  Every process selects the rows
        # of each table that contain pieces of local target data and copies it
        # into place.

        # these are for tracking offsets within the spectra for each target.
        tspec_flux = {x: 0 for x in self._my_targets}
        tspec_ivar = tspec_flux.copy()
        tspec_mask = tspec_flux.copy()
        tspec_res = tspec_flux.copy()

        for sfile in spectrafiles:
            rows = self._spec_keep[sfile]
            if len(rows) == 0:
                continue

            hdus = None
            if comm_rank == 0:
                hdus = fits.open(sfile, memmap=True)

            for b in self._bands[sfile]:
                extname = "{}_{}".format(b.upper(), "FLUX")
                hdata = None
                if comm_rank == 0:
                    hdata = hdus[extname].data[rows]
                if comm is not None:
                    hdata = comm.bcast(hdata, root=0)

                toff = 0
                for t in self._my_targets:
                    if t in self._target_specs[sfile]:
                        for trow in self._target_specs[sfile][t]:
                            self._my_data[toff].spectra[tspec_flux[t]].flux = \
                                hdata[trow].astype(np.float64).copy()
                            tspec_flux[t] += 1
                    toff += 1

                extname = "{}_{}".format(b.upper(), "IVAR")
                hdata = None
                if comm_rank == 0:
                    hdata = hdus[extname].data[rows]

                if comm is not None:
                    hdata = comm.bcast(hdata, root=0)

                toff = 0
                for t in self._my_targets:
                    if t in self._target_specs[sfile]:
                        for trow in self._target_specs[sfile][t]:
                            self._my_data[toff].spectra[tspec_ivar[t]].ivar = \
                                hdata[trow].astype(np.float64).copy()
                            tspec_ivar[t] += 1
                    toff += 1

                extname = "{}_{}".format(b.upper(), "MASK")
                hdata = None
                if comm_rank == 0:
                    if extname in hdus:
                        hdata = hdus[extname].data[rows]
                if comm is not None:
                    hdata = comm.bcast(hdata, root=0)

                if hdata is not None:
                    toff = 0
                    for t in self._my_targets:
                        if t in self._target_specs[sfile]:
                            for trow in self._target_specs[sfile][t]:
                                self._my_data[toff].spectra[tspec_mask[t]]\
                                    .ivar *= (hdata[trow] == 0)
                                tspec_mask[t] += 1
                        toff += 1

                extname = "{}_{}".format(b.upper(), "RESOLUTION")
                hdata = None
                if comm_rank == 0:
                    hdata = hdus[extname].data[rows]

                if comm is not None:
                    hdata = comm.bcast(hdata, root=0)

                toff = 0
                for t in self._my_targets:
                    if t in self._target_specs[sfile]:
                        for trow in self._target_specs[sfile][t]:
                            dia = Resolution(hdata[trow].astype(np.float64))
                            csr = dia.tocsr()
                            self._my_data[toff].spectra[tspec_res[t]].R = dia
                            self._my_data[toff].spectra[tspec_res[t]].Rcsr = \
                                csr
                            tspec_res[t] += 1
                    toff += 1

                del hdata

            if comm_rank == 0:
                hdus.close()

        # Compute the coadds now if we are going to use those

        if coadd:
            for t in self._my_data:
                t.compute_coadd()

        self.fibermap = Table(np.hstack([ self._fmaps[x] \
            for x in self._spectrafiles ]))

        super(DistTargetsDESI, self).__init__(self._keep_targets, comm=comm)
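
# --- Illustrative usage sketch (not part of the example above) ---
# DistTargetsDESI reads one or more spectra files and distributes the targets
# across MPI ranks.  The file names are hypothetical; comm=None gives a serial
# run, while an mpi4py communicator (e.g. MPI.COMM_WORLD) gives a parallel one.
targets = DistTargetsDESI(['spectra-64-1234.fits', 'spectra-64-1235.fits'],
                          coadd=True, targetids=None, comm=None)
fibermap = targets.fibermap  #- combined fibermap of all selected targets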
Example #22
def write_frame(outfile, frame, header=None, fibermap=None, units=None):
    """Write a frame fits file and returns path to file written.

    Args:
        outfile: full path to output file, or tuple (night, expid, channel)
        frame:  desispec.frame.Frame object with wave, flux, ivar...

    Optional:
        header: astropy.io.fits.Header or dict to override frame.header
        fibermap: table to store as FIBERMAP HDU

    Returns:
        full filepath of output file that was written

    Note:
        to create a Frame object to pass into write_frame,
        frame = Frame(wave, flux, ivar, resolution_data)
    """
    log = get_logger()
    outfile = makepath(outfile, 'frame')

    if header is not None:
        hdr = fitsheader(header)
    else:
        hdr = fitsheader(frame.meta)

    add_dependencies(hdr)

    # Vette
    diagnosis = frame.vet()
    if diagnosis != 0:
        raise IOError(
            "Frame did not pass simple vetting test. diagnosis={:d}".format(
                diagnosis))

    hdus = fits.HDUList()
    x = fits.PrimaryHDU(frame.flux.astype('f4'), header=hdr)
    x.header['EXTNAME'] = 'FLUX'
    if units is not None:
        units = str(units)
        if 'BUNIT' in hdr and hdr['BUNIT'] != units:
            log.warning('BUNIT {bunit} != units {units}; using {units}'.format(
                bunit=hdr['BUNIT'], units=units))
        x.header['BUNIT'] = units
    hdus.append(x)

    hdus.append(fits.ImageHDU(frame.ivar.astype('f4'), name='IVAR'))
    hdus.append(fits.CompImageHDU(frame.mask, name='MASK'))
    hdus.append(fits.ImageHDU(frame.wave.astype('f4'), name='WAVELENGTH'))
    hdus[-1].header['BUNIT'] = 'Angstrom'
    hdus.append(
        fits.ImageHDU(frame.resolution_data.astype('f4'), name='RESOLUTION'))

    if fibermap is not None:
        fibermap = encode_table(fibermap)  #- unicode -> bytes
        fibermap.meta['EXTNAME'] = 'FIBERMAP'
        hdus.append(fits.convenience.table_to_hdu(fibermap))
    elif frame.fibermap is not None:
        fibermap = encode_table(frame.fibermap)  #- unicode -> bytes
        fibermap.meta['EXTNAME'] = 'FIBERMAP'
        hdus.append(fits.convenience.table_to_hdu(fibermap))
    elif frame.spectrograph is not None:
        x.header[
            'FIBERMIN'] = 500 * frame.spectrograph  # Hard-coded (as in desispec.frame)
    else:
        log.error(
            "You are likely writing a frame without sufficient fiber info")

    if frame.chi2pix is not None:
        hdus.append(fits.ImageHDU(frame.chi2pix.astype('f4'), name='CHI2PIX'))

    hdus.writeto(outfile + '.tmp', overwrite=True, checksum=True)
    os.rename(outfile + '.tmp', outfile)

    return outfile
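
# --- Illustrative usage sketch (not part of the example above) ---
# Per the docstring above, a minimal Frame is built from wave, flux, ivar and
# resolution_data and then written out.  Array shapes and the output path are
# hypothetical, and depending on the desispec version the Frame constructor
# and Frame.vet() may require additional arguments or header metadata.
import numpy as np

nspec, nwave, ndiag = 5, 100, 3
wave = np.arange(nwave, dtype='f8')
flux = np.ones((nspec, nwave), dtype='f8')
ivar = np.ones((nspec, nwave), dtype='f8')
rdata = np.zeros((nspec, ndiag, nwave), dtype='f8')
rdata[:, ndiag // 2, :] = 1.0  #- delta-function resolution on the central diagonal
frame = Frame(wave, flux, ivar, resolution_data=rdata, spectrograph=0)
write_frame('frame-b0-00000001.fits', frame, units='1e-17 erg/(s cm2 Angstrom)')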
Example #23
    def update(self, other):
        """
        Overwrite or append new data.

        Given another Spectra object, compare the fibermap information with
        the existing one.  For spectra that already exist, overwrite existing 
        data with the new values.  For spectra that do not exist, append that 
        data to the end of the spectral data.

        Args:
            other (Spectra): the new data to add.

        Returns:
            nothing (object updated in place).

        """

        # Does the other Spectra object have any data?

        if other.num_spectra() == 0:
            return

        # Do we have new bands to add?

        newbands = []
        for b in other.bands:
            if b not in self.bands:
                newbands.append(b)
            else:
                if not np.allclose(self.wave[b], other.wave[b]):
                    raise RuntimeError("band {} has an incompatible wavelength grid".format(b))

        bands = list(self.bands)
        bands.extend(newbands)

        # Are we adding mask data in this update?

        add_mask = False
        if other.mask is None:
            if self.mask is not None:
                raise RuntimeError("existing spectra has a mask, cannot "
                    "update it to a spectra with no mask")
        else:
            if self.mask is None:
                add_mask = True

        # Are we adding resolution data in this update?

        ndiag = {}

        add_res = False
        if other.resolution_data is None:
            if self.resolution_data is not None:
                raise RuntimeError("existing spectra has resolution data, cannot "
                    "update it to a spectra with none")
        else:
            if self.resolution_data is not None:
                for b in self.bands:
                    ndiag[b] = self.resolution_data[b].shape[1]
                for b in other.bands:
                    odiag = other.resolution_data[b].shape[1]
                    if b not in self.bands:
                        ndiag[b] = odiag
                    else:
                        if odiag != ndiag[b]:
                            raise RuntimeError("Resolution matrices for a"
                                " given band must have the same dimensoins")
            else:
                add_res = True
                for b in other.bands:
                    ndiag[b] = other.resolution_data[b].shape[1]

        # Are we adding extra data in this update?

        add_extra = False
        if other.extra is None:
            if self.extra is not None:
                raise RuntimeError("existing spectra has extra data, cannot "
                    "update it to a spectra with none")
        else:
            if self.extra is None:
                add_extra = True

        # Compute which targets / exposures are new

        nother = len(other.fibermap)
        exists = np.zeros(nother, dtype=int)

        indx_original = []

        if self.fibermap is not None:
            for r in range(nother):
                expid = other.fibermap[r]["EXPID"]
                fiber = other.fibermap[r]["FIBER"]
                for i, row in enumerate(self.fibermap):
                    if (expid == row["EXPID"]) and (fiber == row["FIBER"]):
                        indx_original.append(i)
                        exists[r] += 1

        if len(np.where(exists > 1)[0]) > 0:
            raise RuntimeError("found duplicate spectra (same EXPID and FIBER) in the fibermap")

        indx_exists = np.where(exists == 1)[0]
        indx_new = np.where(exists == 0)[0]

        # Make new data arrays of the correct size to hold both the old and 
        # new data

        nupdate = len(indx_exists)
        nnew = len(indx_new)

        if self.fibermap is None:
            nold = 0
            newfmap = other.fibermap.copy()
        else:
            nold = len(self.fibermap)
            newfmap = encode_table(np.zeros( (nold + nnew, ),
                                   dtype=self.fibermap.dtype))
        
        newwave = {}
        newflux = {}
        newivar = {}
        
        newmask = None
        if add_mask or self.mask is not None:
            newmask = {}
        
        newres = None
        newR = None
        if add_res or self.resolution_data is not None:
            newres = {}
            newR = {}

        newextra = None
        if add_extra or self.extra is not None:
            newextra = {}

        for b in bands:
            nwave = None
            if b in self.bands:
                nwave = self.wave[b].shape[0]
                newwave[b] = self.wave[b]
            else:
                nwave = other.wave[b].shape[0]
                newwave[b] = other.wave[b].astype(self._ftype)
            newflux[b] = np.zeros( (nold + nnew, nwave), dtype=self._ftype)
            newivar[b] = np.zeros( (nold + nnew, nwave), dtype=self._ftype)
            if newmask is not None:
                newmask[b] = np.zeros( (nold + nnew, nwave), dtype=np.uint32)
                newmask[b][:,:] = specmask["NODATA"]
            if newres is not None:
                newres[b] = np.zeros( (nold + nnew, ndiag[b], nwave), dtype=self._ftype)
            if newextra is not None:
                newextra[b] = {}

        # Copy the old data

        if nold > 0:
            # We have some data (i.e. we are not starting with an empty Spectra)
            newfmap[:nold] = self.fibermap

            for b in self.bands:
                newflux[b][:nold,:] = self.flux[b]
                newivar[b][:nold,:] = self.ivar[b]
                if self.mask is not None:
                    newmask[b][:nold,:] = self.mask[b]
                elif add_mask:
                    newmask[b][:nold,:] = 0
                if self.resolution_data is not None:
                    newres[b][:nold,:,:] = self.resolution_data[b]
                if self.extra is not None:
                    for ex in self.extra[b].items():
                        newextra[b][ex[0]] = np.zeros( newflux[b].shape,
                            dtype=self._ftype)
                        newextra[b][ex[0]][:nold,:] = ex[1]

        # Update existing spectra

        for i, s in enumerate(indx_exists):
            row = indx_original[i]
            for b in other.bands:
                newflux[b][row,:] = other.flux[b][s,:].astype(self._ftype)
                newivar[b][row,:] = other.ivar[b][s,:].astype(self._ftype)
                if other.mask is not None:
                    newmask[b][row,:] = other.mask[b][s,:]
                else:
                    newmask[b][row,:] = 0
                if other.resolution_data is not None:
                    newres[b][row,:,:] = other.resolution_data[b][s,:,:].astype(self._ftype)
                if other.extra is not None:
                    for ex in other.extra[b].items():
                        if ex[0] not in newextra[b]:
                            newextra[b][ex[0]] = np.zeros(newflux[b].shape,
                                dtype=self._ftype)
                        newextra[b][ex[0]][row,:] = ex[1][s,:].astype(self._ftype)

        # Append new spectra

        if nnew > 0:
            newfmap[nold:] = other.fibermap[indx_new]

            for b in other.bands:
                newflux[b][nold:,:] = other.flux[b][indx_new].astype(self._ftype)
                newivar[b][nold:,:] = other.ivar[b][indx_new].astype(self._ftype)
                if other.mask is not None:
                    newmask[b][nold:,:] = other.mask[b][indx_new]
                else:
                    newmask[b][nold:,:] = 0
                if other.resolution_data is not None:
                    newres[b][nold:,:,:] = other.resolution_data[b][indx_new].astype(self._ftype)
                if other.extra is not None:
                    for ex in other.extra[b].items():
                        if ex[0] not in newextra[b]:
                            newextra[b][ex[0]] = np.zeros(newflux[b].shape,
                                dtype=self._ftype)
                        newextra[b][ex[0]][nold:,:] = ex[1][indx_new].astype(self._ftype)

        # Update all sparse resolution matrices

        for b in bands:
            if newres is not None:
                newR[b] = np.array( [ Resolution(r) for r in newres[b] ] )

        # Swap data into place

        self._bands = bands
        self.wave = newwave
        self.fibermap = newfmap
        self.flux = newflux
        self.ivar = newivar
        self.mask = newmask
        self.resolution_data = newres
        self.R = newR
        self.extra = newextra

        return
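
# --- Illustrative sketch (not part of the example above) ---
# The core of update() is the (EXPID, FIBER) matching step: rows of the new
# fibermap that already exist are overwritten in place and the rest are
# appended.  A standalone version of that bookkeeping with toy fibermaps:
import numpy as np

old = np.array([(1, 10), (1, 11)], dtype=[('EXPID', 'i4'), ('FIBER', 'i4')])
new = np.array([(1, 11), (2, 11)], dtype=[('EXPID', 'i4'), ('FIBER', 'i4')])

exists = np.zeros(len(new), dtype=int)
indx_original = []
for r in range(len(new)):
    for i, row in enumerate(old):
        if new[r]['EXPID'] == row['EXPID'] and new[r]['FIBER'] == row['FIBER']:
            indx_original.append(i)
            exists[r] += 1

indx_exists = np.where(exists == 1)[0]  #- rows of `new` that overwrite old ones
indx_new = np.where(exists == 0)[0]     #- rows of `new` that get appended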
Example #24
def read_spectra(infile, single=False):
    """
    Read Spectra object from FITS file.

    This reads data written by the write_spectra function.  A new Spectra
    object is instantiated and returned.

    Args:
        infile (str): path to read
        single (bool): if True, keep spectra as single precision in memory.

    Returns (Spectra):
        The object containing the data read from disk.

    """

    ftype = np.float64
    if single:
        ftype = np.float32

    infile = os.path.abspath(infile)
    if not os.path.isfile(infile):
        raise IOError("{} is not a file".format(infile))

    hdus = fits.open(infile, mode="readonly")
    nhdu = len(hdus)

    # load the metadata.

    meta = dict(hdus[0].header)

    # initialize data objects

    bands = []
    fmap = None
    wave = None
    flux = None
    ivar = None
    mask = None
    res = None
    extra = None
    scores = None

    # For efficiency, go through the HDUs in disk-order.  Use the
    # extension name to determine where to put the data.  We don't
    # explicitly copy the data, since that will be done when constructing
    # the Spectra object.

    for h in range(1, nhdu):
        name = hdus[h].header["EXTNAME"]
        if name == "FIBERMAP":
            fmap = encode_table(Table(hdus[h].data, copy=True).as_array())
        elif name == "SCORES":
            scores = encode_table(Table(hdus[h].data, copy=True).as_array())
        else:
            # Find the band based on the name
            mat = re.match(r"(.*)_(.*)", name)
            if mat is None:
                raise RuntimeError("FITS extension name {} does not contain the band".format(name))
            band = mat.group(1).lower()
            type = mat.group(2)
            if band not in bands:
                bands.append(band)
            if type == "WAVELENGTH":
                if wave is None:
                    wave = {}
                wave[band] = native_endian(hdus[h].data.astype(ftype))
            elif type == "FLUX":
                if flux is None:
                    flux = {}
                flux[band] = native_endian(hdus[h].data.astype(ftype))
            elif type == "IVAR":
                if ivar is None:
                    ivar = {}
                ivar[band] = native_endian(hdus[h].data.astype(ftype))
            elif type == "MASK":
                if mask is None:
                    mask = {}
                mask[band] = native_endian(hdus[h].data.astype(np.uint32))
            elif type == "RESOLUTION":
                if res is None:
                    res = {}
                res[band] = native_endian(hdus[h].data.astype(ftype))
            else:
                # this must be an "extra" HDU
                if extra is None:
                    extra = {}
                if band not in extra:
                    extra[band] = {}
                extra[band][type] = native_endian(hdus[h].data.astype(ftype))

    # Construct the Spectra object from the data.  If there are any
    # inconsistencies in the sizes of the arrays read from the file,
    # they will be caught by the constructor.

    spec = Spectra(bands, wave, flux, ivar, mask=mask, resolution_data=res,
        fibermap=fmap, meta=meta, extra=extra, single=single, scores=scores)

    hdus.close()

    return spec
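
# --- Illustrative sketch (not part of the example above) ---
# Band and data type are recovered from each extension name of the form
# BAND_TYPE.  A standalone version of that parsing step with hypothetical
# extension names:
import re

for name in ["B_WAVELENGTH", "R_FLUX", "Z_IVAR", "B_CHI2PIX"]:
    mat = re.match(r"(.*)_(.*)", name)
    band, hdutype = mat.group(1).lower(), mat.group(2)
    print(band, hdutype)  #- e.g. 'b', 'WAVELENGTH'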
Example #26
def read_frame_as_spectra(filename, night=None, expid=None, band=None, single=False):
    """
    Read a FITS file containing a Frame and return a Spectra.

    A Frame file is very close to a Spectra object (by design), and
    only differs by missing the NIGHT and EXPID in the fibermap, as
    well as containing only one band of data.

    Args:
        filename (str): path to read

    Options:
        night (int): the night value to use for all rows of the fibermap.
        expid (int): the expid value to use for all rows of the fibermap.
        band (str): the name of this band.
        single (bool): if True, keep spectra as single precision in memory.

    Returns (Spectra):
        The object containing the data read from disk.

    """
    fr = read_frame(filename)
    if fr.fibermap is None:
        raise RuntimeError("reading Frame files into Spectra only supported if a fibermap exists")

    nspec = len(fr.fibermap)

    if band is None:
        band = fr.meta['CAMERA'][0]

    if night is None:
        night = fr.meta['NIGHT']

    if expid is None:
        expid = fr.meta['EXPID']

    fmap = np.asarray(fr.fibermap.copy())
    fmap = add_columns(fmap,
                       ['NIGHT', 'EXPID', 'TILEID'],
                       [np.int32(night), np.int32(expid), np.int32(fr.meta['TILEID'])],
                       )

    fmap = encode_table(fmap)

    bands = [ band ]

    mask = None
    if fr.mask is not None:
        mask = {band : fr.mask}

    res = None
    if fr.resolution_data is not None:
        res = {band : fr.resolution_data}

    extra = None
    if fr.chi2pix is not None:
        extra = {band : {"CHI2PIX" : fr.chi2pix}}

    spec = Spectra(bands, {band : fr.wave}, {band : fr.flux}, {band : fr.ivar},
        mask=mask, resolution_data=res, fibermap=fmap, meta=fr.meta,
        extra=extra, single=single, scores=fr.scores)

    return spec
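
# --- Illustrative usage sketch (not part of the example above) ---
# Wrapping a single-camera frame file as a one-band Spectra.  The path is
# hypothetical; NIGHT, EXPID and the band default to values taken from the
# frame header, or can be overridden explicitly.
spec = read_frame_as_spectra('frame-b0-00000001.fits')
spec = read_frame_as_spectra('frame-b0-00000001.fits',
                             night=20200101, expid=1, band='b')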
Example #27
def write_spectra(outfile, spec, units=None):
    """
    Write Spectra object to FITS file.

    This places the metadata into the header of the (empty) primary HDU.
    The first extension contains the fibermap, and then HDUs are created for
    the different data arrays for each band.

    Floating point data is converted to 32 bits before writing.

    Args:
        outfile (str): path to write
        spec (Spectra): the object containing the data
        units (str): optional string to use for the BUNIT key of the flux
            HDUs for each band.

    Returns:
        The absolute path to the file that was written.

    """

    outfile = os.path.abspath(outfile)

    # Create the parent directory, if necessary.
    dir, base = os.path.split(outfile)
    if not os.path.exists(dir):
        os.makedirs(dir)

    # Create HDUs from the data
    all_hdus = fits.HDUList()

    # metadata goes in empty primary HDU
    hdr = fitsheader(spec.meta)
    add_dependencies(hdr)

    all_hdus.append(fits.PrimaryHDU(header=hdr))

    # Next is the fibermap
    fmap = spec.fibermap.copy()
    fmap.meta["EXTNAME"] = "FIBERMAP"
    with warnings.catch_warnings():
        #- nanomaggies aren't an official IAU unit but don't complain
        warnings.filterwarnings('ignore', '.*nanomaggies.*')
        hdu = fits.convenience.table_to_hdu(fmap)

    # Add comments for fibermap columns.
    for i, colname in enumerate(fmap.dtype.names):
        if colname in fibermap_comments:
            key = "TTYPE{}".format(i + 1)
            name = hdu.header[key]
            assert name == colname
            comment = fibermap_comments[name]
            hdu.header[key] = (name, comment)
        else:
            pass
            #print('Unknown comment for {}'.format(colname))

    all_hdus.append(hdu)

    # Now append the data for all bands

    for band in spec.bands:
        hdu = fits.ImageHDU(name="{}_WAVELENGTH".format(band.upper()))
        hdu.header["BUNIT"] = "Angstrom"
        hdu.data = spec.wave[band].astype("f8")
        all_hdus.append(hdu)

        hdu = fits.ImageHDU(name="{}_FLUX".format(band.upper()))
        if units is None:
            hdu.header["BUNIT"] = "10**-17 erg/(s cm2 Angstrom)"
        else:
            hdu.header["BUNIT"] = units
        hdu.data = spec.flux[band].astype("f4")
        all_hdus.append(hdu)

        hdu = fits.ImageHDU(name="{}_IVAR".format(band.upper()))
        if units is None:
            hdu.header["BUNIT"] = '10**+34 (s2 cm4 Angstrom2) / erg2'
        else:
            hdu.header["BUNIT"] = ((u.Unit(
                units, format='fits'))**-2).to_string('fits')
        hdu.data = spec.ivar[band].astype("f4")
        all_hdus.append(hdu)

        if spec.mask is not None:
            # hdu = fits.CompImageHDU(name="{}_MASK".format(band.upper()))
            hdu = fits.ImageHDU(name="{}_MASK".format(band.upper()))
            hdu.data = spec.mask[band].astype(np.uint32)
            all_hdus.append(hdu)

        if spec.resolution_data is not None:
            hdu = fits.ImageHDU(name="{}_RESOLUTION".format(band.upper()))
            hdu.data = spec.resolution_data[band].astype("f4")
            all_hdus.append(hdu)

        if spec.extra is not None:
            for ex in spec.extra[band].items():
                hdu = fits.ImageHDU(name="{}_{}".format(band.upper(), ex[0]))
                hdu.data = ex[1].astype("f4")
                all_hdus.append(hdu)

    if spec.scores is not None:
        scores_tbl = encode_table(spec.scores)  #- unicode -> bytes
        scores_tbl.meta['EXTNAME'] = 'SCORES'
        all_hdus.append(fits.convenience.table_to_hdu(scores_tbl))
        if spec.scores_comments is not None:  # add comments in header
            hdu = all_hdus['SCORES']
            for i in range(1, 999):
                key = 'TTYPE' + str(i)
                if key in hdu.header:
                    value = hdu.header[key]
                    if value in spec.scores_comments.keys():
                        hdu.header[key] = (value, spec.scores_comments[value])

    all_hdus.writeto("{}.tmp".format(outfile), overwrite=True, checksum=True)
    os.rename("{}.tmp".format(outfile), outfile)

    return outfile
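
# --- Illustrative usage sketch (not part of the example above) ---
# Typical round trip, assuming `spec` is a Spectra object (e.g. from
# read_frame_as_spectra above) and read_spectra from an earlier example is in
# scope; the output path is hypothetical.
outpath = write_spectra('spectra-64-1234.fits', spec)
spec2 = read_spectra(outpath)
print(spec2.bands, spec2.flux[spec2.bands[0]].shape)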
Example #28
File: util.py  Project: desihub/desispec
def write_bintable(filename, data, header=None, comments=None, units=None,
                   extname=None, clobber=False, primary_extname='PRIMARY'):
    """Utility function to write a fits binary table complete with
    comments and units in the FITS header too.  DATA can either be
    dictionary, an Astropy Table, a numpy.recarray or a numpy.ndarray.
    """
    from astropy.table import Table
    from desiutil.io import encode_table

    log = get_logger()

    #- Convert data as needed
    if isinstance(data, (np.recarray, np.ndarray, Table)):
        outdata = encode_table(data, encoding='ascii')
    else:
        outdata = encode_table(_dict2ndarray(data), encoding='ascii')

    # hdu = astropy.io.fits.BinTableHDU(outdata, header=header, name=extname)
    hdu = astropy.io.fits.convenience.table_to_hdu(outdata)
    if extname is not None:
        hdu.header['EXTNAME'] = extname
    else:
        log.warning("Table does not have EXTNAME set!")

    if header is not None:
        if isinstance(header, astropy.io.fits.header.Header):
            for key, value in header.items():
                comment = header.comments[key]
                hdu.header[key] = (value, comment)
        else:
            hdu.header.update(header)

    #- Allow comments and units to be None
    if comments is None:
        comments = dict()
    if units is None:
        units = dict()
    #
    # Add comments and units to the *columns* of the table.
    #
    for i in range(1, 999):
        key = 'TTYPE'+str(i)
        if key not in hdu.header:
            break
        else:
            value = hdu.header[key]
            if value in comments:
                hdu.header[key] = (value, comments[value])
            if value in units:
                hdu.header['TUNIT'+str(i)] = (units[value], value+' units')
    #
    # Add checksum cards.
    #
    hdu.add_checksum()

    #- Write the data and header

    if os.path.isfile(filename):
        if not(extname is None and clobber):
            #
            # Always open update mode with memmap=False, but keep the
            # formal check commented out in case we need it in the future.
            #
            memmap = False
            #
            # Check to see if filesystem supports memory-mapping on update.
            #
            # memmap = _supports_memmap(filename)
            # if not memmap:
            #     log.warning("Filesystem does not support memory-mapping!")
            with astropy.io.fits.open(filename, mode='update', memmap=memmap) as hdulist:
                if extname is None:
                    #
                    # In DESI, we should *always* be setting the extname, so this
                    # might never be called.
                    #
                    log.debug("Adding new HDU to %s.", filename)
                    hdulist.append(hdu)
                else:
                    if extname in hdulist:
                        if clobber:
                            log.debug("Replacing HDU with EXTNAME = '%s' in %s.", extname, filename)
                            hdulist[extname] = hdu
                        else:
                            log.warning("Do not modify %s because EXTNAME = '%s' exists.", filename, extname)
                    else:
                        log.debug("Adding new HDU with EXTNAME = '%s' to %s.", extname, filename)
                        hdulist.append(hdu)
            return
    #
    # If we reach this point, we're writing a new file.
    #
    if os.path.isfile(filename):
        log.debug("Overwriting %s.", filename)
    else:
        log.debug("Writing new file %s.", filename)
    hdu0 = astropy.io.fits.PrimaryHDU()
    hdu0.header['EXTNAME'] = primary_extname
    hdulist = astropy.io.fits.HDUList([hdu0, hdu])
    hdulist.writeto(filename, overwrite=clobber, checksum=True)
    return
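
# --- Illustrative usage sketch (not part of the example above) ---
# With this version, re-writing an existing EXTNAME only replaces the HDU when
# clobber=True; otherwise the existing HDU is kept and a warning is logged.
# The file name and table contents are hypothetical.
from astropy.table import Table

tbl = Table({'TARGETID': [1, 2, 3], 'Z': [0.1, 0.2, 0.3]})
write_bintable('zcatalog.fits', tbl, extname='ZCATALOG',
               comments={'Z': 'Best-fit redshift'}, clobber=True)
#- a second call with clobber=True replaces the ZCATALOG HDU in place
write_bintable('zcatalog.fits', tbl, extname='ZCATALOG', clobber=True)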