Example #1
def write_flux_calibration(outfile, fluxcalib, header=None):
    """Writes  flux calibration.

    Args:
        outfile : output file name
        fluxcalib : FluxCalib object

    Options:
        header : dict-like object of key/value pairs to include in header
    """
    hx = fits.HDUList()

    hdr = fitsheader(header)
    add_dependencies(hdr)

    hdr['EXTNAME'] = 'FLUXCALIB'
    hdr['BUNIT'] = ('1e+17 cm2 electron s / erg',
                    'i.e. (electron/Angstrom) / (1e-17 erg/s/cm2/Angstrom)')
    hx.append(fits.PrimaryHDU(fluxcalib.calib.astype('f4'), header=hdr))
    hx.append(fits.ImageHDU(fluxcalib.ivar.astype('f4'), name='IVAR'))
    hx.append(fits.CompImageHDU(fluxcalib.mask, name='MASK'))
    hx.append(fits.ImageHDU(fluxcalib.wave.astype('f4'), name='WAVELENGTH'))
    hx[-1].header['BUNIT'] = 'Angstrom'

    hx.writeto(outfile + '.tmp', clobber=True, checksum=True)
    os.rename(outfile + '.tmp', outfile)

    return outfile
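
The write-then-rename at the end is worth noting: the HDUList is first written to outfile + '.tmp' and only renamed once the write succeeds, so a crash mid-write never leaves a truncated file under the real name. A minimal standalone sketch of the same pattern, using a placeholder array and the modern astropy overwrite= keyword instead of the deprecated clobber=:

import os
import numpy as np
from astropy.io import fits

calib = np.ones((5, 100), dtype='f4')        # placeholder calibration array
hdr = fits.Header()
hdr['EXTNAME'] = 'FLUXCALIB'

hx = fits.HDUList([fits.PrimaryHDU(calib, header=hdr)])
outfile = 'fluxcalib-test.fits'
hx.writeto(outfile + '.tmp', overwrite=True, checksum=True)
os.rename(outfile + '.tmp', outfile)         # replace only after a complete write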
Example #2
def write_fibermap(outfile, fibermap, header=None):
    """Write fibermap binary table to outfile.

    Args:
        outfile (str): output filename
        fibermap: astropy Table of fibermap data
        header: header data to include in same HDU as fibermap

    Returns:
        write_fibermap (str): full path to the fibermap file that was written.
    """
    outfile = makepath(outfile)

    if header is not None:
        hdr = fitsheader(header)
    else:
        hdr = fitsheader(fibermap.meta)

    add_dependencies(hdr)

    #- astropy.io.fits incorrectly generates a warning about 2D arrays of strings.
    #- Temporarily turn off warnings to avoid this; lvmspec.test.test_io will
    #- catch it if the arrays actually are written incorrectly.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        write_bintable(outfile, fibermap, hdr, comments=fibermap_comments,
            extname="FIBERMAP", clobber=True)

    return outfile
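
write_bintable is a lvmspec helper that is not shown here. Under that assumption, a rough standalone equivalent of the same suppress-warnings-while-writing idea, using only astropy and placeholder table contents, looks like this:

import warnings
from astropy.io import fits
from astropy.table import Table

fibermap = Table({'FIBER': [0, 1, 2], 'OBJTYPE': ['STD', 'SKY', 'TGT']})
fibermap.meta['EXTNAME'] = 'FIBERMAP'

with warnings.catch_warnings():
    warnings.simplefilter("ignore")              # silence string-column warnings
    hdu = fits.table_to_hdu(fibermap)
    fits.HDUList([fits.PrimaryHDU(), hdu]).writeto('fibermap-test.fits',
                                                   overwrite=True)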
Example #3
def write_stdstar_models(norm_modelfile,
                         normalizedFlux,
                         wave,
                         fibers,
                         data,
                         header=None):
    """Writes the normalized flux for the best models.

    Args:
        norm_modelfile : output file path
        normalizedFlux : 2D array of flux[nstdstars, nwave]
        wave : 1D array of wavelengths[nwave] in Angstroms
        fibers : 1D array of fiberids for these spectra
        data : meta data table about which templates best fit
        header : optional dict-like header keywords to include in HDU 0
    """
    hdr = fitsheader(header)
    add_dependencies(hdr)

    hdr['EXTNAME'] = ('FLUX', '[1e-17 erg/(s cm2 Angstrom)]')
    hdr['BUNIT'] = ('1e-17 erg/(s cm2 Angstrom)', 'Flux units')
    hdu1 = fits.PrimaryHDU(normalizedFlux.astype('f4'), header=hdr)

    hdu2 = fits.ImageHDU(wave.astype('f4'))
    hdu2.header['EXTNAME'] = ('WAVELENGTH', '[Angstroms]')
    hdu2.header['BUNIT'] = ('Angstrom', 'Wavelength units')

    hdu3 = fits.ImageHDU(fibers, name='FIBERS')

    # metadata
    from astropy.io.fits import Column
    cols = []
    for k in data.keys():
        if len(data[k].shape) == 1:
            cols.append(Column(name=k, format='D', array=data[k]))
    tbhdu = fits.BinTableHDU.from_columns(fits.ColDefs(cols), name='METADATA')

    hdulist = fits.HDUList([hdu1, hdu2, hdu3, tbhdu])

    # add coefficients
    if "COEFF" in data:
        hdulist.append(fits.ImageHDU(data["COEFF"], name="COEFF"))

    tmpfile = norm_modelfile + ".tmp"
    hdulist.writeto(tmpfile, clobber=True, checksum=True)
    os.rename(tmpfile, norm_modelfile)
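
The METADATA HDU above keeps only the 1D columns and stores each as a 64-bit float ('D') column. The same construction in isolation, with invented placeholder data:

import numpy as np
from astropy.io import fits

data = {'CHI2DOF': np.array([1.1, 0.9, 1.3]),      # placeholder fit statistics
        'TEFF': np.array([5800.0, 6100.0, 5500.0])}

cols = [fits.Column(name=k, format='D', array=v)   # 'D' = double precision
        for k, v in data.items() if v.ndim == 1]
tbhdu = fits.BinTableHDU.from_columns(fits.ColDefs(cols), name='METADATA')
print(tbhdu.columns.names)                         # ['CHI2DOF', 'TEFF']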
Example #4
def write_sky(outfile, skymodel, header=None):
    """Write sky model.

    Args:
        outfile : filename or (night, expid, camera) tuple
        skymodel : SkyModel object, with the following attributes
            wave : 1D wavelength in vacuum Angstroms
            flux : 2D[nspec, nwave] sky flux
            ivar : 2D inverse variance of sky flux
            mask : 2D mask for sky flux
            stat_ivar : 2D inverse variance of sky flux (statistical only)
        header : optional fits header data (fits.Header, dict, or list)
    """
    from lvmutil.depend import add_dependencies
    from .util import fitsheader, makepath

    outfile = makepath(outfile, 'sky')

    #- Convert header to fits.Header if needed
    if header is not None:
        hdr = fitsheader(header)
    else:
        hdr = fitsheader(skymodel.header)

    add_dependencies(hdr)

    hx = fits.HDUList()

    hdr['EXTNAME'] = ('SKY', 'no dimension')
    hx.append( fits.PrimaryHDU(skymodel.flux.astype('f4'), header=hdr) )
    hx.append( fits.ImageHDU(skymodel.ivar.astype('f4'), name='IVAR') )
    hx.append( fits.CompImageHDU(skymodel.mask, name='MASK') )
    hx.append( fits.ImageHDU(skymodel.wave.astype('f4'), name='WAVELENGTH') )
    hx[-1].header['BUNIT'] = 'Angstrom'
    if skymodel.stat_ivar is not None:
        hx.append( fits.ImageHDU(skymodel.stat_ivar.astype('f4'), name='STATIVAR') )

    hx.writeto(outfile+'.tmp', clobber=True, checksum=True)
    os.rename(outfile+'.tmp', outfile)

    return outfile
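
Reading such a file back is just indexing the HDUList by the extension names written above; the filename here is a placeholder for whatever write_sky returned:

from astropy.io import fits

with fits.open('sky-test.fits') as hx:
    flux = hx['SKY'].data                 # primary HDU carries EXTNAME='SKY'
    ivar = hx['IVAR'].data
    mask = hx['MASK'].data
    wave = hx['WAVELENGTH'].data
    stat_ivar = hx['STATIVAR'].data if 'STATIVAR' in hx else None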
Example #5
def write_fiberflat(outfile, fiberflat, header=None):
    """Write fiberflat object to outfile

    Args:
        outfile: filepath string or (night, expid, camera) tuple
        fiberflat: FiberFlat object
        header: (optional) dict or fits.Header object to use as HDU 0 header

    Returns:
        filepath of file that was written
    """
    outfile = makepath(outfile, 'fiberflat')

    if header is None:
        hdr = fitsheader(fiberflat.header)
    else:
        hdr = fitsheader(header)
    if fiberflat.chi2pdf is not None:
        hdr['chi2pdf'] = float(fiberflat.chi2pdf)

    hdr['EXTNAME'] = 'FIBERFLAT'
    if 'BUNIT' in hdr:
        del hdr['BUNIT']

    add_dependencies(hdr)

    ff = fiberflat  #- shorthand

    hdus = fits.HDUList()
    hdus.append(fits.PrimaryHDU(ff.fiberflat.astype('f4'), header=hdr))
    hdus.append(fits.ImageHDU(ff.ivar.astype('f4'), name='IVAR'))
    hdus.append(fits.CompImageHDU(ff.mask, name='MASK'))
    hdus.append(fits.ImageHDU(ff.meanspec.astype('f4'), name='MEANSPEC'))
    hdus.append(fits.ImageHDU(ff.wave.astype('f4'), name='WAVELENGTH'))
    hdus[-1].header['BUNIT'] = 'Angstrom'

    hdus.writeto(outfile + '.tmp', clobber=True, checksum=True)
    os.rename(outfile + '.tmp', outfile)
    return outfile
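
The writeto calls above pass checksum=True, so the written HDUs carry CHECKSUM/DATASUM cards; opening with checksum=True verifies them on read. A quick check with a placeholder filename:

from astropy.io import fits

with fits.open('fiberflat-test.fits', checksum=True) as hdus:
    print([hdu.name for hdu in hdus])     # extensions written by write_fiberflat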
Example #6
def write_image(outfile, image, meta=None):
    """Writes image object to outfile

    Args:
        outfile : output file string
        image : lvmspec.image.Image object
            (or any object with 2D array attributes pix, ivar, mask,
            plus camera, readnoise, and meta attributes)

    Optional:
        meta : dict-like object with metadata key/values (e.g. FITS header)
    """

    if meta is not None:
        hdr = fitsheader(meta)
    else:
        hdr = fitsheader(image.meta)

    add_dependencies(hdr)

    hx = fits.HDUList()
    hdu = fits.ImageHDU(image.pix.astype(np.float32), name='IMAGE', header=hdr)
    if 'CAMERA' not in hdu.header:
        hdu.header.append( ('CAMERA', image.camera.lower(), 'Spectrograph Camera') )

    if 'RDNOISE' not in hdu.header and np.isscalar(image.readnoise):
        hdu.header.append( ('RDNOISE', image.readnoise, 'Read noise [RMS electrons/pixel]'))

    hx.append(hdu)
    hx.append(fits.ImageHDU(image.ivar.astype(np.float32), name='IVAR'))
    hx.append(fits.CompImageHDU(image.mask.astype(np.int16), name='MASK'))
    if not np.isscalar(image.readnoise):
        hx.append(fits.ImageHDU(image.readnoise.astype(np.float32), name='READNOISE'))

    hx.writeto(outfile+'.tmp', clobber=True, checksum=True)
    os.rename(outfile+'.tmp', outfile)

    return outfile
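
A hedged usage sketch: write_image only touches a handful of attributes, so a duck-typed stand-in for lvmspec.image.Image is enough for illustration; every value below is a placeholder, not real data:

import numpy as np
from types import SimpleNamespace

fake_image = SimpleNamespace(
    pix=np.zeros((10, 10), dtype=np.float32),
    ivar=np.ones((10, 10), dtype=np.float32),
    mask=np.zeros((10, 10), dtype=np.int16),
    readnoise=3.0,                 # scalar, so it becomes the RDNOISE keyword
    camera='b0',
    meta={'NIGHT': '20200101'},    # placeholder metadata
)
write_image('image-test.fits', fake_image)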
Example #7
def write_spectra(outfile, spec, units=None):
    """
    Write Spectra object to FITS file.

    This places the metadata into the header of the (empty) primary HDU.
    The first extension contains the fibermap, and then HDUs are created for
    the different data arrays for each band.

    Floating point data is converted to 32 bits before writing.

    Args:
        outfile (str): path to write
        spec (Spectra): the object containing the data
        units (str): optional string to use for the BUNIT key of the flux
            HDUs for each band.

    Returns:
        The absolute path to the file that was written.

    """

    outfile = os.path.abspath(outfile)

    # Create the parent directory, if necessary.
    outdir = os.path.dirname(outfile)
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    # Create HDUs from the data
    all_hdus = fits.HDUList()

    # metadata goes in empty primary HDU
    hdr = fitsheader(spec.meta)
    add_dependencies(hdr)

    all_hdus.append(fits.PrimaryHDU(header=hdr))

    # Next is the fibermap
    fmap = spec.fibermap.copy()
    fmap.meta["EXTNAME"] = "FIBERMAP"
    hdu = fits.convenience.table_to_hdu(fmap)

    # Add comments for fibermap columns.
    for i, colname in enumerate(fmap.dtype.names):
        if colname in fibermap_comments:
            key = "TTYPE{}".format(i + 1)
            name = hdu.header[key]
            assert name == colname
            comment = fibermap_comments[name]
            hdu.header[key] = (name, comment)
        else:
            print('Unknown comment for {}'.format(colname))

    all_hdus.append(hdu)

    # Now append the data for all bands

    for band in spec.bands:
        hdu = fits.ImageHDU(name="{}_WAVELENGTH".format(band.upper()))
        hdu.header["BUNIT"] = "Angstrom"
        hdu.data = spec.wave[band].astype("f8")
        all_hdus.append(hdu)

        hdu = fits.ImageHDU(name="{}_FLUX".format(band.upper()))
        if units is None:
            hdu.header["BUNIT"] = "1e-17 erg/(s cm2 Angstrom)"
        else:
            hdu.header["BUNIT"] = units
        hdu.data = spec.flux[band].astype("f4")
        all_hdus.append(hdu)

        hdu = fits.ImageHDU(name="{}_IVAR".format(band.upper()))
        hdu.data = spec.ivar[band].astype("f4")
        all_hdus.append(hdu)

        if spec.mask is not None:
            hdu = fits.CompImageHDU(name="{}_MASK".format(band.upper()))
            hdu.data = spec.mask[band].astype(np.uint32)
            all_hdus.append(hdu)

        if spec.resolution_data is not None:
            hdu = fits.ImageHDU(name="{}_RESOLUTION".format(band.upper()))
            hdu.data = spec.resolution_data[band].astype("f4")
            all_hdus.append(hdu)

        if spec.extra is not None:
            for ex in spec.extra[band].items():
                hdu = fits.ImageHDU(name="{}_{}".format(band.upper(), ex[0]))
                hdu.data = ex[1].astype("f4")
                all_hdus.append(hdu)

    try:
        all_hdus.writeto("{}.tmp".format(outfile),
                         overwrite=True,
                         checksum=True)
    except TypeError:
        all_hdus.writeto("{}.tmp".format(outfile), clobber=True, checksum=True)
    os.rename("{}.tmp".format(outfile), outfile)

    return outfile
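
The try/except around writeto is a compatibility shim: newer astropy spells the argument overwrite=, while older releases only accept the deprecated clobber=. The same idea as a small reusable helper (a hypothetical name, not part of lvmspec):

def _writeto_any_astropy(hdulist, path):
    """Write an HDUList, tolerating both old and new astropy keyword names."""
    try:
        hdulist.writeto(path, overwrite=True, checksum=True)
    except TypeError:
        # older astropy only knows the deprecated clobber= keyword
        hdulist.writeto(path, clobber=True, checksum=True)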
Example #8
def write_frame(outfile, frame, header=None, fibermap=None, units=None):
    """Write a frame fits file and returns path to file written.

    Args:
        outfile: full path to output file, or tuple (night, expid, channel)
        frame:  lvmspec.frame.Frame object with wave, flux, ivar...

    Optional:
        header: astropy.io.fits.Header or dict to override frame.header
        fibermap: table to store as FIBERMAP HDU

    Returns:
        full filepath of output file that was written

    Note:
        to create a Frame object to pass into write_frame,
        frame = Frame(wave, flux, ivar, resolution_data)
    """
    log = get_logger()
    outfile = makepath(outfile, 'frame')

    if header is not None:
        hdr = fitsheader(header)
    else:
        hdr = fitsheader(frame.meta)

    add_dependencies(hdr)

    # Vet the frame before writing
    diagnosis = frame.vet()
    if diagnosis != 0:
        raise IOError(
            "Frame did not pass simple vetting test. diagnosis={:d}".format(
                diagnosis))

    hdus = fits.HDUList()
    x = fits.PrimaryHDU(frame.flux.astype('f4'), header=hdr)
    x.header['EXTNAME'] = 'FLUX'
    if units is not None:
        units = str(units)
        if 'BUNIT' in hdr and hdr['BUNIT'] != units:
            log.warning('BUNIT {bunit} != units {units}; using {units}'.format(
                bunit=hdr['BUNIT'], units=units))
        x.header['BUNIT'] = units
    hdus.append(x)

    hdus.append(fits.ImageHDU(frame.ivar.astype('f4'), name='IVAR'))
    hdus.append(fits.CompImageHDU(frame.mask, name='MASK'))
    hdus.append(fits.ImageHDU(frame.wave.astype('f8'), name='WAVELENGTH'))
    hdus[-1].header['BUNIT'] = 'Angstrom'
    if frame.resolution_data is not None:
        hdus.append(
            fits.ImageHDU(frame.resolution_data.astype('f4'),
                          name='RESOLUTION'))
    elif frame.wsigma is not None:
        log.debug("Using sigma widths from QUICKRESOLUTION")
        qrimg = fits.ImageHDU(frame.wsigma.astype('f4'),
                              name='QUICKRESOLUTION')
        qrimg.header["NDIAG"] = frame.ndiag
        hdus.append(qrimg)
    if fibermap is not None:
        fibermap = encode_table(fibermap)  #- unicode -> bytes
        fibermap.meta['EXTNAME'] = 'FIBERMAP'
        hdus.append(fits.convenience.table_to_hdu(fibermap))
    elif frame.fibermap is not None:
        fibermap = encode_table(frame.fibermap)  #- unicode -> bytes
        fibermap.meta['EXTNAME'] = 'FIBERMAP'
        hdus.append(fits.convenience.table_to_hdu(fibermap))
    elif frame.spectrograph is not None:
        # Hard-coded (as in lvmspec.frame)
        x.header['FIBERMIN'] = 500 * frame.spectrograph
    else:
        log.error(
            "You are likely writing a frame without sufficient fiber info")

    if frame.chi2pix is not None:
        hdus.append(fits.ImageHDU(frame.chi2pix.astype('f4'), name='CHI2PIX'))

    hdus.writeto(outfile + '.tmp', clobber=True, checksum=True)
    os.rename(outfile + '.tmp', outfile)

    return outfile
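
The encode_table() helper (imported elsewhere in this module, not shown here) appears to convert unicode string columns to fixed-width byte strings before the table goes to FITS, which is what the "#- unicode -> bytes" comments refer to. A minimal illustration of that conversion on a plain astropy Table with placeholder data:

import numpy as np
from astropy.table import Table

t = Table({'OBJTYPE': np.array(['STD', 'SKY'], dtype='U6')})  # unicode column
t['OBJTYPE'] = t['OBJTYPE'].astype('S6')                      # unicode -> bytes
print(t['OBJTYPE'].dtype)                                     # now a bytes ('S') dtype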
Example #9
def write_raw(filename, rawdata, header, camera=None, primary_header=None):
    '''
    Write raw pixel data to a DESI raw data file

    Args:
        filename : file name to write data; if this exists, append a new HDU
        rawdata : 2D ndarray of raw pixel data including overscans
        header : dict-like object or fits.Header with keywords
            CCDSECx, BIASSECx, DATASECx where x=1,2,3, or 4

    Options:
        camera : b0, r1 .. z9 - override value in header
        primary_header : header to write in HDU0 if filename doesn't yet exist

    The primary utility of this function over raw fits calls is to ensure
    that all necessary keywords are present before writing the file:
    DOSVER, FEEVER, and DETECTOR must appear in header or primary_header,
    and CCDSECx, BIASSECx, DATASECx (where x=1,2,3, or 4) are required.
    Missing DATE-OBS, GAINx, or RDNOISEx generates only a non-fatal warning.
    '''
    log = get_logger()

    header = lvmspec.io.util.fitsheader(header)
    primary_header = lvmspec.io.util.fitsheader(primary_header)

    if rawdata.dtype not in (np.int16, np.int32, np.int64):
        message = 'dtype {} not supported for raw data'.format(rawdata.dtype)
        log.fatal(message)
        raise ValueError(message)

    fail_message = ''
    for required_key in ['DOSVER', 'FEEVER', 'DETECTOR']:
        if required_key not in primary_header:
            if required_key in header:
                primary_header[required_key] = header[required_key]
            else:
                fail_message = fail_message + \
                    'Keyword {} must be in header or primary_header\n'.format(required_key)
    if fail_message != '':
        raise ValueError(fail_message)

    #- Check required keywords before writing anything
    missing_keywords = list()
    if camera is None and 'CAMERA' not in header:
        log.error("Must provide camera keyword or header['CAMERA']")
        missing_keywords.append('CAMERA')

    for amp in ['1', '2', '3', '4']:
        for prefix in ['CCDSEC', 'BIASSEC', 'DATASEC']:
            keyword = prefix + amp
            if keyword not in header:
                log.error('Missing keyword ' + keyword)
                missing_keywords.append(keyword)

    #- Missing DATE-OBS is warning but not error
    if 'DATE-OBS' not in primary_header:
        if 'DATE-OBS' in header:
            primary_header['DATE-OBS'] = header['DATE-OBS']
        else:
            log.warning('missing keyword DATE-OBS')

    #- Missing GAINx is warning but not error
    for amp in ['1', '2', '3', '4']:
        keyword = 'GAIN' + amp
        if keyword not in header:
            log.warning('Gain keyword {} missing; using 1.0'.format(keyword))
            header[keyword] = 1.0

    #- Missing RDNOISEx is warning but not error
    for amp in ['1', '2', '3', '4']:
        keyword = 'RDNOISE' + amp
        if keyword not in header:
            log.warning('Readnoise keyword {} missing'.format(keyword))

    #- Stop if any keywords are missing
    if len(missing_keywords) > 0:
        raise KeyError('missing required keywords {}'.format(missing_keywords))

    #- Set EXTNAME=camera
    if camera is not None:
        header['CAMERA'] = camera.lower()
        extname = camera.upper()
    else:
        if header['CAMERA'] != header['CAMERA'].lower():
            log.warning('Converting CAMERA {} to lowercase'.format(
                header['CAMERA']))
            header['CAMERA'] = header['CAMERA'].lower()
        extname = header['CAMERA'].upper()

    header['INHERIT'] = True

    #- fits.CompImageHDU doesn't know how to fill in default keywords, so
    #- temporarily generate an uncompressed HDU to get those keywords
    header = fits.ImageHDU(rawdata, header=header, name=extname).header

    #- Bizarrely, compression of 64-bit integers isn't supported.
    #- downcast to 32-bit if that won't lose precision.
    #- Real raw data should be 32-bit or 16-bit anyway
    if rawdata.dtype == np.int64:
        if np.max(np.abs(rawdata)) < 2**31:
            rawdata = rawdata.astype(np.int32)

    if rawdata.dtype in (np.int16, np.int32):
        dataHDU = fits.CompImageHDU(rawdata, header=header, name=extname)
    elif rawdata.dtype == np.int64:
        log.warning(
            'Image compression not supported for 64-bit; writing uncompressed')
        dataHDU = fits.ImageHDU(rawdata, header=header, name=extname)
    else:
        log.error("How did we get this far with rawdata dtype {}?".format(
            rawdata.dtype))
        dataHDU = fits.ImageHDU(rawdata, header=header, name=extname)

    #- Actually write or update the file
    if os.path.exists(filename):
        hdus = fits.open(filename, mode='append', memmap=False)
        if extname in hdus:
            hdus.close()
            raise ValueError('Camera {} already in {}'.format(
                camera, filename))
        else:
            hdus.append(dataHDU)
            hdus.flush()
            hdus.close()
    else:
        hdus = fits.HDUList()
        add_dependencies(primary_header)
        hdus.append(fits.PrimaryHDU(None, header=primary_header))
        hdus.append(dataHDU)
        hdus.writeto(filename)
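
A hedged usage sketch, assuming write_raw and its lvmspec dependencies are importable: the dictionaries below contain just the keywords that write_raw checks for, with invented section strings and version values rather than a real DESI geometry:

import numpy as np

rawdata = np.zeros((2048, 2048), dtype=np.int16)    # placeholder pixel data
header = {}
for amp in '1234':
    header['CCDSEC' + amp] = '[1:1024,1:1024]'      # placeholder sections
    header['BIASSEC' + amp] = '[1025:1074,1:1024]'
    header['DATASEC' + amp] = '[1:1024,1:1024]'
primary_header = {'DOSVER': 'sim', 'FEEVER': 'sim', 'DETECTOR': 'sim'}

write_raw('desi-raw-test.fits', rawdata, header,
          camera='b0', primary_header=primary_header)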