Example #1
def fit_modified_blackbody_to_imagecube(
    image_cube,
    outheader,
    wavelengths=[70, 160, 250, 350, 500],
    error_scaling=0.2,
    error_cube=None,
    pixelfitter=None,
    ncores=4,
    clobber=True,
    integral=False,
    out_prefix="",
):
    """
    Fit a modified blackbody to each pixel in an image cube.  Writes the
    results to files of the form ``{out_prefix}T.fits``, ``{out_prefix}beta.fits``,
    ``{out_prefix}N.fits``, and optionally ``{out_prefix}integral.fits``.

    Parameters
    ----------
    image_cube : `~numpy.ndarray`
        A cube constructed from the individual wavelengths of the Herschel
        image.  Should have units of MJy (not MJy/sr).
    wavelengths : list
        The wavelengths, in microns, to include in the fit
    error_scaling : float or None
        Fractional scaling applied to the input fluxes to estimate the
        errors (ignored if ``error_cube`` is given)
    error_cube : None or `~numpy.ndarray`
        Alternative to ``error_scaling``.  A cube of errors the same size as
        the image_cube.
    pixelfitter : :class:`PixelFitter` or None
        An instance of the :class:`PixelFitter` class to use for the fitting
        (this is how guesses are specified).  If None, will use defaults.
    ncores : int
        Number of cores to use.  If ``ncores > 1``, the per-pixel fits are
        distributed with ``parallel_map``
    clobber : bool
        Overwrite existing output files?
    integral : bool
        Also include the integral of the modified blackbody, e.g. for
        luminosity determination?  This increases the execution time by
        a factor of 2
    out_prefix : str
        A prefix to prepend to the output file names
        
    Returns
    -------
    t_hdu,b_hdu,n_hdu : :class:`~astropy.io.fits.HDUList`
        HDULists with the best-fit values as the
        `~astropy.io.fits.PrimaryHDU` image and the errors
        as an `~astropy.io.fits.ImageHDU` in the first (ERROR)
        extension
    int_hdu : :class:`~astropy.io.fits.PrimaryHDU`
        (optional) An HDU containing an image of the integral
        in :math:`erg/s/cm^2`
    """

    if pixelfitter is None:
        pixelfitter = PixelFitter()

    wavelengths_sorted = sorted(wavelengths)

    # Only fit pixels with no NaNs
    ok_to_fit = ~np.any(np.isnan(image_cube), axis=0)
    okcount = np.count_nonzero(ok_to_fit)
    if okcount == 0:
        raise ValueError("No valid pixels found.")
    okx, oky = np.where(ok_to_fit)

    frequencies = u.Quantity(wavelengths_sorted, u.um).to(u.Hz, u.spectral())

    timg, bimg, nimg = [np.full(ok_to_fit.shape, np.nan) for ii in range(3)]
    terr, berr, nerr = [np.full(ok_to_fit.shape, np.nan) for ii in range(3)]
    if integral:
        intimg = np.full(ok_to_fit.shape, np.nan)

    pb = ProgressBar(okcount)

    def fitter(xy):
        x, y = xy
        if error_cube is not None:
            error_spec = error_cube[:, x, y]
        else:
            error_spec = image_cube[:, x, y] * error_scaling
        vals, errs = pixelfitter(
            frequencies,
            u.Quantity(image_cube[:, x, y], u.MJy),
            u.Quantity(error_spec, u.MJy),
        )
        timg[x, y] = vals[0]
        bimg[x, y] = vals[1]
        nimg[x, y] = vals[2]

        terr[x, y] = errs[0]
        berr[x, y] = errs[1]
        nerr[x, y] = errs[2]

        if integral:
            integ = pixelfitter.integral(1 * u.cm, 1 * u.um)
            vals = vals + (integ, )
            intimg[x, y] = integ

        pb.update()

        return vals, errs, xy

    if ncores > 1:
        result = parallel_map(fitter, list(zip(okx, oky)), numcores=ncores)
        # need to unpack results now
        for vals, errs, xy in result:
            x, y = xy
            timg[x, y] = vals[0]
            bimg[x, y] = vals[1]
            nimg[x, y] = vals[2]

            terr[x, y] = errs[0]
            berr[x, y] = errs[1]
            nerr[x, y] = errs[2]

            if integral:
                intimg[x, y] = vals[3]
    else:
        for xy in zip(okx, oky):
            fitter(xy)

    t_hdu = fits.HDUList([
        fits.PrimaryHDU(data=timg, header=outheader),
        fits.ImageHDU(data=terr, header=outheader, name='ERROR')
    ])
    t_hdu[0].header['BUNIT'] = 'K'
    t_hdu[1].header['BUNIT'] = 'K'
    b_hdu = fits.HDUList([
        fits.PrimaryHDU(data=bimg, header=outheader),
        fits.ImageHDU(data=berr, header=outheader, name='ERROR')
    ])
    b_hdu[0].header['BUNIT'] = ''
    b_hdu[1].header['BUNIT'] = ''
    n_hdu = fits.HDUList([
        fits.PrimaryHDU(data=nimg, header=outheader),
        fits.ImageHDU(data=nerr, header=outheader, name='ERROR')
    ])
    n_hdu[0].header['BUNIT'] = 'cm^(-2)'
    n_hdu[1].header['BUNIT'] = 'cm^(-2)'

    t_hdu.writeto(out_prefix + 'T.fits', overwrite=clobber)
    b_hdu.writeto(out_prefix + 'beta.fits', overwrite=clobber)
    n_hdu.writeto(out_prefix + 'N.fits', overwrite=clobber)

    if integral:
        int_hdu = fits.PrimaryHDU(data=intimg, header=outheader)
        int_hdu.header['BUNIT'] = 'erg*s^(-1)*cm^(-2)'
        int_hdu.writeto(out_prefix + 'integral.fits', overwrite=clobber)
        return t_hdu, b_hdu, n_hdu, int_hdu
    else:
        return t_hdu, b_hdu, n_hdu
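
A minimal usage sketch for the function above (file names are hypothetical; it assumes the five Herschel bands were already convolved and regridded onto a common pixel grid, and that the ``PixelFitter`` defaults are acceptable):

import numpy as np
from astropy.io import fits

# Stack five matched-resolution Herschel maps into a cube (hypothetical files)
bands = [70, 160, 250, 350, 500]
hdus = [fits.open('herschel_{0}um.fits'.format(wl))[0] for wl in bands]
cube = np.array([h.data for h in hdus])

t_hdu, b_hdu, n_hdu = fit_modified_blackbody_to_imagecube(
    cube, hdus[0].header, wavelengths=bands, error_scaling=0.2,
    ncores=1, out_prefix='g34_')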
Example #2
def wavemap(hdu,
            soldict,
            caltype='line',
            function='poly',
            order=3,
            blank=0,
            nearest=False,
            array_only=False,
            clobber=True,
            log=None,
            verbose=True):
    """Read in an image and a set of wavlength solutions.  Calculate the best
       wavelength solution for a given dataset and then apply that data set to the
       image

     return
    """

    # set up the time of the observation
    dateobs = saltkey.get('DATE-OBS', hdu[0])
    utctime = saltkey.get('TIME-OBS', hdu[0])
    exptime = saltkey.get('EXPTIME', hdu[0])
    instrume = saltkey.get('INSTRUME', hdu[0]).strip()
    grating = saltkey.get('GRATING', hdu[0]).strip()
    if caltype == 'line':
        grang = saltkey.get('GRTILT', hdu[0])
        arang = saltkey.get('CAMANG', hdu[0])
    else:
        grang = saltkey.get('GR-ANGLE', hdu[0])
        arang = saltkey.get('AR-ANGLE', hdu[0])
    filtername = saltkey.get('FILTER', hdu[0]).strip()
    slitname = saltkey.get('MASKID', hdu[0])
    slit = st.getslitsize(slitname)
    xbin, ybin = saltkey.ccdbin(hdu[0])

    timeobs = sr.enterdatetime('%s %s' % (dateobs, utctime))

    # check to see if there is more than one solution
    if caltype == 'line':
        if len(soldict) == 1:
            sol = list(soldict.keys())[0]
            slitid = None
            if not sr.matchobservations(soldict[sol], instrume, grating, grang,
                                        arang, filtername, slitid):
                msg = 'Observations do not match setup for transformation but using the solution anyway'
                if log:
                    log.warning(msg)

    for i in range(1, len(hdu)):
        if hdu[i].name == 'SCI':
            if log:
                log.message('Correcting extension %i' % i)
            istart = int(0.5 * len(hdu[i].data))

            # open up the data
            # set up the xarr and initial wavlength solution
            xarr = np.arange(len(hdu[i].data[istart]), dtype='int64')

            # get the slitid
            try:
                slitid = saltkey.get('SLITNAME', hdu[i])
            except Exception:
                slitid = None

            # check to see if a wavelength extension (WAVEXT) already exists;
            # if so, update it with the transformation from xshift to wavelength
            if saltkey.found('WAVEXT', hdu[i]):
                w_ext = saltkey.get('WAVEXT', hdu[i]) - 1
                wavemap = hdu[w_ext].data
                function, order, coef = sr.findlinesol(
                    soldict, istart, nearest, timeobs, exptime, instrume,
                    grating, grang, arang, filtername, slitid, xarr)
                ws = WavelengthSolution.WavelengthSolution(xarr,
                                                           xarr,
                                                           function=function,
                                                           order=order)
                ws.set_coef(coef)
                for j in range(len(hdu[i].data)):
                    wavemap[j, :] = ws.value(wavemap[j, :])
                if array_only: return wavemap
                hdu[w_ext].data = wavemap
                continue

            # set up a wavelength solution -- still in here for testing MOS data
            try:
                w_arr = sr.findsol(xarr, soldict, istart, caltype, nearest,
                                   timeobs, exptime, instrume, grating, grang,
                                   arang, filtername, slit, xbin, ybin, slitid,
                                   function, order)
            except SALTSpecError as e:
                if slitid:
                    msg = 'SLITID %s: %s' % (slitid, e)
                    if log:
                        log.warning(msg)
                    continue
                else:
                    raise SALTSpecError(e)

            if w_arr is None:
                w_arr = sr.findsol(xarr, soldict, istart, 'rss', nearest,
                                   timeobs, exptime, instrume, grating, grang,
                                   arang, filtername, slit, xbin, ybin, slitid,
                                   function, order)

            # for each line in the data, determine the wavelength solution
            # for a given line in the image
            wavemap = np.zeros_like(hdu[i].data)
            for j in range(len(hdu[i].data)):
                # find the wavelength solution for the data
                w_arr = sr.findsol(xarr, soldict, j, caltype, nearest, timeobs,
                                   exptime, instrume, grating, grang, arang,
                                   filtername, slit, xbin, ybin, slitid,
                                   function, order)
                if w_arr is not None: wavemap[j, :] = w_arr
            if array_only: return wavemap

            # write out the oimg
            hduwav = fits.ImageHDU(data=wavemap,
                                   header=hdu[i].header,
                                   name='WAV')
            hdu.append(hduwav)
            saltkey.new('WAVEXT',
                        len(hdu) - 1, 'Extension for Wavelength Map', hdu[i])

    return hdu
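
A hedged sketch of how this might be driven (file names are hypothetical; ``soldict`` is assumed to have been built beforehand by the pipeline's wavelength-solution reader):

from astropy.io import fits

hdu = fits.open('mbxgpP202305150021.fits')   # hypothetical RSS image
# soldict: dict of WavelengthSolution objects from the pipeline (not shown)
hdu = wavemap(hdu, soldict, caltype='line', function='poly', order=3)
hdu.writeto('wmbxgpP202305150021.fits', overwrite=True)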
Example #3
    def writeFITS(self,
                  template,
                  sciarr,
                  whtarr,
                  ctxarr=None,
                  versions=None,
                  overwrite=yes,
                  blend=True,
                  virtual=False):
        """
        Generate PyFITS objects for each output extension
        using the file given by 'template' for populating
        headers.

        The output arrays are taken from 'sciarr', 'whtarr' and 'ctxarr'.
        """
        if not isinstance(template, list):
            template = [template]

        if fileutil.findFile(self.output):
            if overwrite:
                log.info('Deleting previous output product: %s' % self.output)
                fileutil.removeFile(self.output)

            else:
                log.warning('Output file %s already exists and overwrite not '
                            'specified!' % self.output)
                log.error('Quitting... Please remove before resuming '
                          'operations.')
                raise IOError

        # initialize output value for this method
        outputFITS = {}
        # Default value for NEXTEND when 'build'== True
        nextend = 3
        if not self.build:
            nextend = 0
            if self.outweight:
                if overwrite:
                    if fileutil.findFile(self.outweight):
                        log.info('Deleting previous output WHT product: %s' %
                                 self.outweight)
                    fileutil.removeFile(self.outweight)
                else:
                    log.warning('Output file %s already exists and overwrite '
                                'not specified!' % self.outweight)
                    log.error('Quitting... Please remove before resuming '
                              'operations.')
                    raise IOError

            if self.outcontext:
                if overwrite:
                    if fileutil.findFile(self.outcontext):
                        log.info('Deleting previous output CTX product: %s' %
                                 self.outcontext)
                    fileutil.removeFile(self.outcontext)
                else:
                    log.warning('Output file %s already exists and overwrite '
                                'not specified!' % self.outcontext)
                    log.error('Quitting... Please remove before resuming '
                              'operations.')
                    raise IOError

        # Get default headers from multi-extension FITS file
        # If only writing out single drizzle product, blending needs to be
        # forced off as there is only 1 input to report, no blending needed
        if self.single:
            blend = False

        # If input data is not in MEF FITS format, it will return 'None'
        # and those headers will have to be generated from drizzle output
        # file FITS headers.
        # NOTE: These are HEADER objects, not HDUs
        #prihdr,scihdr,errhdr,dqhdr = getTemplates(template)
        self.fullhdrs, intab = getTemplates(template, blend=False)

        newhdrs, newtab = getTemplates(template, blend=blend)
        if newtab is not None: nextend += 1  # account for new table extn

        prihdr = newhdrs[0]
        scihdr = newhdrs[1]
        errhdr = newhdrs[2]
        dqhdr = newhdrs[3]

        # Setup primary header as an HDU ready for appending to output FITS file
        prihdu = fits.PrimaryHDU(header=prihdr, data=None)

        # Start by updating PRIMARY header keywords...
        prihdu.header.set('EXTEND', value=True, after='NAXIS')
        prihdu.header['NEXTEND'] = nextend
        prihdu.header['FILENAME'] = self.output
        prihdu.header['PROD_VER'] = 'DrizzlePac {}'.format(version.__version__)

        # Update the ROOTNAME with the new value as well
        _indx = self.output.find('_drz')
        if _indx < 0:
            rootname_val = self.output
        else:
            rootname_val = self.output[:_indx]
        prihdu.header['ROOTNAME'] = rootname_val

        # Get the total exposure time for the image
        # If not calculated by PyDrizzle and passed through
        # the pardict, then leave value from the template image.
        if self.texptime:
            prihdu.header['EXPTIME'] = self.texptime
            prihdu.header.set('TEXPTIME', value=self.texptime, after='EXPTIME')
            prihdu.header['EXPSTART'] = self.expstart
            prihdu.header['EXPEND'] = self.expend

        #Update ASN_MTYPE to reflect the fact that this is a product
        # Currently hard-wired to always output 'PROD-DTH' as MTYPE
        prihdu.header['ASN_MTYP'] = 'PROD-DTH'

        # Update DITHCORR calibration keyword if present
        # Remove when we can modify FITS headers in place...
        if 'DRIZCORR' in prihdu.header:
            prihdu.header['DRIZCORR'] = 'COMPLETE'
        if 'DITHCORR' in prihdu.header:
            prihdu.header['DITHCORR'] = 'COMPLETE'

        prihdu.header['NDRIZIM'] = (len(self.parlist),
                                    'Drizzle, No. images drizzled onto output')

        # Only a subset of these keywords makes sense for the new WCS based
        # transformations. They need to be reviewed to decide what to keep
        # and what to leave out.
        if not self.blot:
            self.addDrizKeywords(prihdu.header, versions)

        if scihdr:
            try:
                del scihdr['OBJECT']
            except KeyError:
                pass

            if 'CCDCHIP' in scihdr: scihdr['CCDCHIP'] = '-999'
            if 'NCOMBINE' in scihdr:
                scihdr['NCOMBINE'] = self.parlist[0]['nimages']

            # If BUNIT keyword was found and reset, then
            bunit_last_kw = self.find_kwupdate_location(scihdr, 'bunit')
            if self.bunit is not None:
                comment_str = "Units of science product"
                if self.bunit.lower()[:5] == 'count':
                    comment_str = "counts * gain = electrons"
                scihdr.set('BUNIT',
                           value=self.bunit,
                           comment=comment_str,
                           after=bunit_last_kw)
            else:
                # check to see whether to update already present BUNIT comment
                if 'bunit' in scihdr and scihdr['bunit'].lower(
                )[:5] == 'count':
                    comment_str = "counts * gain = electrons"
                    scihdr.set('BUNIT',
                               value=scihdr['bunit'],
                               comment=comment_str,
                               after=bunit_last_kw)

            # Add WCS keywords to SCI header
            if self.wcs:
                pre_wcs_kw = self.find_kwupdate_location(scihdr, 'CD1_1')
                addWCSKeywords(self.wcs,
                               scihdr,
                               blot=self.blot,
                               single=self.single,
                               after=pre_wcs_kw)
                # Recompute this after removing distortion kws
                pre_wcs_kw = self.find_kwupdate_location(scihdr, 'CD1_1')

        ##########
        # Now, build the output file
        ##########
        if self.build:
            print('-Generating multi-extension output file: ', self.output)
            fo = fits.HDUList()

            # Add primary header to output file...
            fo.append(prihdu)

            if self.single and self.compress:
                hdu = fits.CompImageHDU(data=sciarr,
                                        header=scihdr,
                                        name=EXTLIST[0])
            else:
                hdu = fits.ImageHDU(data=sciarr,
                                    header=scihdr,
                                    name=EXTLIST[0])
            last_kw = self.find_kwupdate_location(scihdr, 'EXTNAME')
            hdu.header.set('EXTNAME', value='SCI', after=last_kw)
            hdu.header.set('EXTVER', value=1, after='EXTNAME')
            fo.append(hdu)

            # Build WHT extension here, if requested...
            if errhdr:
                errhdr['CCDCHIP'] = '-999'

            if self.single and self.compress:
                hdu = fits.CompImageHDU(data=whtarr,
                                        header=errhdr,
                                        name=EXTLIST[1])
            else:
                hdu = fits.ImageHDU(data=whtarr,
                                    header=errhdr,
                                    name=EXTLIST[1])
            last_kw = self.find_kwupdate_location(errhdr, 'EXTNAME')
            hdu.header.set('EXTNAME', value='WHT', after=last_kw)
            hdu.header.set('EXTVER', value=1, after='EXTNAME')
            if self.wcs:
                pre_wcs_kw = self.find_kwupdate_location(hdu.header, 'CD1_1')
                # Update WCS Keywords based on PyDrizzle product's value
                # since 'drizzle' itself doesn't update that keyword.
                addWCSKeywords(self.wcs,
                               hdu.header,
                               blot=self.blot,
                               single=self.single,
                               after=pre_wcs_kw)
            fo.append(hdu)

            # Build CTX extension here
            # If there is only 1 plane, write it out as a 2-D extension
            if self.outcontext:
                if ctxarr.shape[0] == 1:
                    _ctxarr = ctxarr[0]
                else:
                    _ctxarr = ctxarr
            else:
                _ctxarr = None

            if self.single and self.compress:
                hdu = fits.CompImageHDU(data=_ctxarr,
                                        header=dqhdr,
                                        name=EXTLIST[2])
            else:
                hdu = fits.ImageHDU(data=_ctxarr,
                                    header=dqhdr,
                                    name=EXTLIST[2])
            last_kw = self.find_kwupdate_location(dqhdr, 'EXTNAME')
            hdu.header.set('EXTNAME', value='CTX', after=last_kw)
            hdu.header.set('EXTVER', value=1, after='EXTNAME')

            if self.wcs:
                pre_wcs_kw = self.find_kwupdate_location(hdu.header, 'CD1_1')
                # Update WCS Keywords based on PyDrizzle product's value
                # since 'drizzle' itself doesn't update that keyword.
                addWCSKeywords(self.wcs,
                               hdu.header,
                               blot=self.blot,
                               single=self.single,
                               after=pre_wcs_kw)
            fo.append(hdu)

            # remove all alternate WCS solutions from headers of this product
            wcs_functions.removeAllAltWCS(fo, [1])

            # add table of combined header keyword values to FITS file
            if newtab is not None:
                fo.append(newtab)

            if not virtual:
                print('Writing out to disk:', self.output)
                # write out file to disk
                fo.writeto(self.output)
                fo.close()
                del fo, hdu
                fo = None
            # End 'if not virtual'
            outputFITS[self.output] = fo

        else:
            print('-Generating simple FITS output: %s' % self.outdata)

            fo = fits.HDUList()
            hdu_header = prihdu.header.copy()
            del hdu_header['nextend']

            # Append remaining unique header keywords from template DQ
            # header to Primary header...
            if scihdr:
                for _card in scihdr.cards:
                    if _card.keyword not in RESERVED_KEYS and _card.keyword not in hdu_header:
                        hdu_header.append(_card)
            for kw in ['PCOUNT', 'GCOUNT']:
                try:
                    del hdu_header[kw]
                except KeyError:
                    pass
            hdu_header['filename'] = self.outdata

            if self.compress:
                hdu = fits.CompImageHDU(data=sciarr, header=hdu_header)
                wcs_ext = [1]
            else:
                hdu = fits.ImageHDU(data=sciarr, header=hdu_header)
                wcs_ext = [0]

            # explicitly set EXTEND to FALSE for simple FITS files.
            dim = len(sciarr.shape)
            hdu.header.set('extend', value=False, after='NAXIS%s' % dim)

            # Add primary header to output file...
            fo.append(hdu)

            # remove all alternate WCS solutions from headers of this product

            logging.disable(logging.INFO)
            wcs_functions.removeAllAltWCS(fo, wcs_ext)
            logging.disable(logging.NOTSET)

            # add table of combined header keyword values to FITS file
            if newtab is not None:
                fo.append(newtab)

            if not virtual:
                print('Writing out image to disk:', self.outdata)
                # write out file to disk
                fo.writeto(self.outdata)
                del fo, hdu
                fo = None
            # End 'if not virtual'
            outputFITS[self.outdata] = fo

            if self.outweight and whtarr is not None:
                # We need to build new PyFITS objects for each WHT array
                fwht = fits.HDUList()

                if errhdr:
                    errhdr['CCDCHIP'] = '-999'

                if self.compress:
                    hdu = fits.CompImageHDU(data=whtarr, header=prihdu.header)
                else:
                    hdu = fits.ImageHDU(data=whtarr, header=prihdu.header)
                # Append remaining unique header keywords from template DQ
                # header to Primary header...
                if errhdr:
                    for _card in errhdr.cards:
                        if _card.keyword not in RESERVED_KEYS and _card.keyword not in hdu.header:
                            hdu.header.append(_card)
                hdu.header['filename'] = self.outweight
                hdu.header['CCDCHIP'] = '-999'
                if self.wcs:
                    pre_wcs_kw = self.find_kwupdate_location(
                        hdu.header, 'CD1_1')
                    # Update WCS Keywords based on PyDrizzle product's value
                    # since 'drizzle' itself doesn't update that keyword.
                    addWCSKeywords(self.wcs,
                                   hdu.header,
                                   blot=self.blot,
                                   single=self.single,
                                   after=pre_wcs_kw)

                # Add primary header to output file...
                fwht.append(hdu)
                # remove all alternate WCS solutions from headers of this product
                wcs_functions.removeAllAltWCS(fwht, wcs_ext)

                if not virtual:
                    print('Writing out image to disk:', self.outweight)
                    fwht.writeto(self.outweight)
                    del fwht, hdu
                    fwht = None
                # End 'if not virtual'
                outputFITS[self.outweight] = fwht

            # If a context image was specified, build a PyFITS object
            # for it as well...
            if self.outcontext and ctxarr is not None:
                fctx = fits.HDUList()

                # If there is only 1 plane, write it out as a 2-D extension
                if ctxarr.shape[0] == 1:
                    _ctxarr = ctxarr[0]
                else:
                    _ctxarr = ctxarr

                if self.compress:
                    hdu = fits.CompImageHDU(data=_ctxarr, header=prihdu.header)
                else:
                    hdu = fits.ImageHDU(data=_ctxarr, header=prihdu.header)
                # Append remaining unique header keywords from template DQ
                # header to Primary header...
                if dqhdr:
                    for _card in dqhdr.cards:
                        if ((_card.keyword not in RESERVED_KEYS)
                                and _card.keyword not in hdu.header):
                            hdu.header.append(_card)
                hdu.header['filename'] = self.outcontext
                if self.wcs:
                    pre_wcs_kw = self.find_kwupdate_location(
                        hdu.header, 'CD1_1')
                    # Update WCS Keywords based on PyDrizzle product's value
                    # since 'drizzle' itself doesn't update that keyword.
                    addWCSKeywords(self.wcs,
                                   hdu.header,
                                   blot=self.blot,
                                   single=self.single,
                                   after=pre_wcs_kw)

                fctx.append(hdu)
                # remove all alternate WCS solutions from headers of this product
                wcs_functions.removeAllAltWCS(fctx, wcs_ext)
                if not virtual:
                    print('Writing out image to disk:', self.outcontext)
                    fctx.writeto(self.outcontext)
                    del fctx, hdu
                    fctx = None
                # End 'if not virtual'

                outputFITS[self.outcontext] = fctx

        return outputFITS
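
A hedged sketch of a call (names are hypothetical; in DrizzlePac this method lives on an already-configured output-image object, so the arrays and template come from earlier pipeline steps):

# outimg: an output-image object with self.output, self.build, etc. set up
outputFITS = outimg.writeFITS('j8c0d1011_flt.fits',
                              sciarr=sci, whtarr=wht, ctxarr=ctx,
                              versions={'PyDrizzle': '1.0'})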
Example #4
        else:
            beam = NP.hstack((beam, data.reshape(-1,1)))

        beam = beam / NP.max(beam, axis=0, keepdims=True)

    outdata += [beam]
    schemes += [scheme]
    npixs += [beam[:,0].size]
    nsides += [HP.npix2nside(beam[:,0].size)]

outfile = rootdir + beams_dir + '{0[0]}_{0[1]}'.format(fnames[0].split('_')[:2])+suffix
hdulist = []
hdulist += [fits.PrimaryHDU()]
hdulist[0].header['EXTNAME'] = 'PRIMARY'
hdulist[0].header['NPOL'] = (len(pols), 'Number of polarizations')
hdulist[0].header['SOURCE'] = ('HERA-CST', 'Source of data')

for pi,pol in enumerate(pols):
    hdu = fits.ImageHDU(outdata[pi], name='BEAM_{0}'.format(pol))
    hdu.header['PIXTYPE'] = ('HEALPIX', 'Type of pixelization')
    hdu.header['ORDERING'] = (schemes[pi], 'Pixel ordering scheme, either RING or NESTED')
    hdu.header['NSIDE'] = (nsides[pi], 'NSIDE parameter of HEALPIX')
    hdu.header['NPIX'] = (npixs[pi], 'Number of HEALPIX pixels')
    hdu.header['FIRSTPIX'] = (0, 'First pixel # (0 based)')
    hdu.header['LASTPIX'] = (npixs[pi]-1, 'Last pixel # (0 based)')
    hdulist += [hdu]
    hdulist += [fits.ImageHDU(frequencies[pi], name='FREQS_{0}'.format(pol))]

outhdu = fits.HDUList(hdulist)
outhdu.writeto(outfile, overwrite=True)
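
Reading the product back is straightforward; a sketch assuming the polarization labels written above:

from astropy.io import fits

hdul = fits.open(outfile)
beam = hdul['BEAM_' + pols[0]].data     # HEALPix beam, one column per frequency
freqs = hdul['FREQS_' + pols[0]].data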
Example #5
def fetch_cube(name,
               h5file,
               version='',
               colour='',
               getCube=True,
               getRSS=False,
               getAll=False,
               overwrite=False):
    # ~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~
    """ A tool to fetch a datacube in FITS format. 

    name      [str]  The name of the SAMI target required. 
    h5file    [str]  The SAMI archive file from which to export. 
    version   [str]  Data version sought. Latest is default. 
    colour    [str]  Colour-specific export. Set to 'B' or 'R'. 
    overwrite [boo]  Overwrite output file as default. 
    """

    # Open HDF5 file and run a series of tests and diagnostics.
    hdf = h5.File(h5file, 'r')

    # Check for SAMI formatting.
    SAMIformatted = checkSAMIformat(hdf)

    # Convert SAMI ID to string
    if not isinstance(name, str):
        name = str(name)

    # Get the data version.
    if version == '':
        version = getVersion(h5file, hdf, version)

    # Get target group and observation type.
    g_target, obstype = getTargetGroup(hdf, name, version)

    # Check for monochrome output
    if colour == '':
        colour = ['B', 'R']

    for col in range(len(colour)):

        # Look for cubes:
        cubeIsComplete = completeCube(hdf, colour[col], g_target)

        # Set name for output file (if not set):
        #if outfile == '':
        outfile = defOutfile(hdf, name, colour[col], overwrite)

        # Data is primary HDU, VAR and WHT are extra HDUs.
        data = g_target[colour[col] + '_Cube_Data']
        var = g_target[colour[col] + '_Cube_Variance']
        wht = g_target[colour[col] + '_Cube_Weight']

        # Construct headers.
        hdr1 = makeHead(data)
        hdr2 = makeHead(var)
        hdr3 = makeHead(wht)

        # And now set up the Image Data Units.
        hdu_c1 = pf.PrimaryHDU(np.array(data), hdr1)
        hdu_c2 = pf.ImageHDU(np.array(var), name='VARIANCE', header=hdr2)
        hdu_c3 = pf.ImageHDU(np.array(wht), name='WEIGHT', header=hdr3)

        hdulist = pf.HDUList([hdu_c1, hdu_c2, hdu_c3])
        hdulist.writeto(outfile, overwrite=overwrite)

        hdulist.close()

    # close h5file and end process
    hdf.close()
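
A hedged usage sketch (SAMI ID and archive path are hypothetical):

fetch_cube(9999, '/data/sami_archive.h5', colour='B', overwrite=True)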
Example #6
def save_im_fits(im, fname, mjd=False, time=False):
    """Save image data to a fits file.

       Args:
            im (Image): image object
            fname (str): path to output fits file
            mjd (int): MJD of saved image
            time (float): UTC time of saved image

       Returns:
    """

    # Transform to Stokes parameters:
    if (im.polrep != 'stokes') or (im.pol_prim != 'I'):
        im = im.switch_polrep(polrep_out='stokes', pol_prim_out=None)

    # Create header and fill in some values
    header = fits.Header()
    header['OBJECT'] = im.source
    header['CTYPE1'] = 'RA---SIN'
    header['CTYPE2'] = 'DEC--SIN'
    header['CDELT1'] = -im.psize / ehc.DEGREE
    header['CDELT2'] = im.psize / ehc.DEGREE
    header['OBSRA'] = im.ra * 180 / 12.   # RA stored in hours -> degrees
    header['OBSDEC'] = im.dec
    header['FREQ'] = im.rf

    # TODO these are the default values for centered images
    # TODO support for arbitrary CRPIX?
    header['CRPIX1'] = im.xdim / 2. + .5
    header['CRPIX2'] = im.ydim / 2. + .5

    if not mjd:
        mjd = float(im.mjd)
    if not time:
        time = im.time
    mjd += (time / 24.)

    header['MJD'] = float(mjd)
    header['TELESCOP'] = 'VLBI'
    header['BUNIT'] = 'JY/PIXEL'
    header['STOKES'] = 'I'

    # Create the fits image
    image = np.reshape(im.imvec, (im.ydim, im.xdim))[::-1, :]  # flip y axis!
    hdu = fits.PrimaryHDU(image, header=header)
    hdulist = [hdu]
    if len(im.qvec):
        # reshape with (ydim, xdim) ordering to match the Stokes I image above
        qimage = np.reshape(im.qvec, (im.ydim, im.xdim))[::-1, :]
        uimage = np.reshape(im.uvec, (im.ydim, im.xdim))[::-1, :]
        header['STOKES'] = 'Q'
        hduq = fits.ImageHDU(qimage, name='Q', header=header)
        header['STOKES'] = 'U'
        hduu = fits.ImageHDU(uimage, name='U', header=header)
        hdulist = [hdu, hduq, hduu]
    if len(im.vvec):
        vimage = np.reshape(im.vvec, (im.ydim, im.xdim))[::-1, :]
        header['STOKES'] = 'V'
        hduv = fits.ImageHDU(vimage, name='V', header=header)
        hdulist.append(hduv)

    hdulist = fits.HDUList(hdulist)

    # Save fits
    hdulist.writeto(fname, overwrite=True)

    return
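
A minimal sketch, assuming ``im`` is an ehtim ``Image`` object already in memory:

save_im_fits(im, 'model_I.fits')                             # default timestamp
save_im_fits(im, 'model_epoch2.fits', mjd=57850, time=3.5)   # override MJD/UTC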
Example #7
def writefits(cleanFDF,
              ccArr,
              iterCountArr,
              residFDF,
              headtemp,
              nBits=32,
              prefixOut="",
              outDir="",
              write_separate_FDF=False,
              verbose=True,
              log=print):
    """Write data to disk in FITS


    Output files:
        Default:
            FDF_clean.fits: RMCLEANed FDF, in 3 extensions: Q,U, and PI.
            FDF_CC.fits: RMCLEAN components, in 3 extensions: Q,U, and PI.
            CLEAN_nIter.fits: RMCLEAN iterations.

        write_separate_FDF=True:
            FDF_clean.fits and FDF_CC.fits are each split into their
            three constituent components:
                FDF_clean_real.fits: Stokes Q
                FDF_clean_im.fits: Stokes U
                FDF_clean_tot.fits: Polarized Intensity (sqrt(Q^2+U^2))
                FDF_CC_real.fits: Stokes Q
                FDF_CC_im.fits: Stokes U
                FDF_CC_tot.fits: Polarized Intensity (sqrt(Q^2+U^2))
                CLEAN_nIter.fits: RMCLEAN iterations.
    Args:
        cleanFDF (ndarray): Cube of RMCLEANed FDFs.
        ccArr (ndarray): Cube of RMCLEAN components (i.e. the model).
        iterCountArr (ndarray): Cube of number of RMCLEAN iterations.
        residFDF (ndarray): Cube of residual RMCLEANed FDFs.

    Kwargs:
        prefixOut (str): Prefix for filenames.
        outDir (str): Directory to save files.
        write_separate_FDF (bool): Write Q, U, and PI separately?
        verbose (bool): Verbosity.
        log (function): Which logging function to use.
    """
    # Default data types
    dtFloat = "float" + str(nBits)
    dtComplex = "complex" + str(2 * nBits)

    if outDir == '':  #To prevent code breaking if file is in current directory
        outDir = '.'
    # Save the clean FDF
    if not write_separate_FDF:
        fitsFileOut = outDir + "/" + prefixOut + "FDF_clean.fits"
        if (verbose): log("> %s" % fitsFileOut)
        hdu0 = pf.PrimaryHDU(cleanFDF.real.astype(dtFloat), headtemp)
        hdu1 = pf.ImageHDU(cleanFDF.imag.astype(dtFloat), headtemp)
        hdu2 = pf.ImageHDU(np.abs(cleanFDF).astype(dtFloat), headtemp)
        hduLst = pf.HDUList([hdu0, hdu1, hdu2])
        hduLst.writeto(fitsFileOut, output_verify="fix", overwrite=True)
        hduLst.close()
    else:
        hdu0 = pf.PrimaryHDU(cleanFDF.real.astype(dtFloat), headtemp)
        fitsFileOut = outDir + "/" + prefixOut + "FDF_clean_real.fits"
        hdu0.writeto(fitsFileOut, output_verify="fix", overwrite=True)
        if (verbose): log("> %s" % fitsFileOut)
        hdu1 = pf.PrimaryHDU(cleanFDF.imag.astype(dtFloat), headtemp)
        fitsFileOut = outDir + "/" + prefixOut + "FDF_clean_im.fits"
        hdu1.writeto(fitsFileOut, output_verify="fix", overwrite=True)
        if (verbose): log("> %s" % fitsFileOut)
        hdu2 = pf.PrimaryHDU(np.abs(cleanFDF).astype(dtFloat), headtemp)
        fitsFileOut = outDir + "/" + prefixOut + "FDF_clean_tot.fits"
        hdu2.writeto(fitsFileOut, output_verify="fix", overwrite=True)
        if (verbose): log("> %s" % fitsFileOut)

    if not write_separate_FDF:
        #Save the complex clean components as another file.
        fitsFileOut = outDir + "/" + prefixOut + "FDF_CC.fits"
        if (verbose): log("> %s" % fitsFileOut)
        hdu0 = pf.PrimaryHDU(ccArr.real.astype(dtFloat), headtemp)
        hdu1 = pf.ImageHDU(ccArr.imag.astype(dtFloat), headtemp)
        hdu2 = pf.ImageHDU(np.abs(ccArr).astype(dtFloat), headtemp)
        hduLst = pf.HDUList([hdu0, hdu1, hdu2])
        hduLst.writeto(fitsFileOut, output_verify="fix", overwrite=True)
        hduLst.close()
    else:
        hdu0 = pf.PrimaryHDU(ccArr.real.astype(dtFloat), headtemp)
        fitsFileOut = outDir + "/" + prefixOut + "FDF_CC_real.fits"
        hdu0.writeto(fitsFileOut, output_verify="fix", overwrite=True)
        if (verbose): log("> %s" % fitsFileOut)
        hdu1 = pf.PrimaryHDU(ccArr.imag.astype(dtFloat), headtemp)
        fitsFileOut = outDir + "/" + prefixOut + "FDF_CC_im.fits"
        hdu1.writeto(fitsFileOut, output_verify="fix", overwrite=True)
        if (verbose): log("> %s" % fitsFileOut)
        hdu2 = pf.PrimaryHDU(np.abs(ccArr).astype(dtFloat), headtemp)
        fitsFileOut = outDir + "/" + prefixOut + "FDF_CC_tot.fits"
        hdu2.writeto(fitsFileOut, output_verify="fix", overwrite=True)
        if (verbose): log("> %s" % fitsFileOut)

    # Because there can be problems with different axes having different FITS
    # keywords, don't try to remove the FD axis; just make it degenerate.

    if headtemp['NAXIS'] > 2:
        headtemp["NAXIS3"] = 1
    if headtemp['NAXIS'] == 4:
        headtemp["NAXIS4"] = 1

    # Save the iteration count mask
    fitsFileOut = outDir + "/" + prefixOut + "CLEAN_nIter.fits"
    if (verbose): log("> %s" % fitsFileOut)
    headtemp["BUNIT"] = "Iterations"
    hdu0 = pf.PrimaryHDU(
        np.expand_dims(iterCountArr.astype(dtFloat),
                       axis=tuple(range(headtemp['NAXIS'] -
                                        iterCountArr.ndim))), headtemp)
    hduLst = pf.HDUList([hdu0])
    hduLst.writeto(fitsFileOut, output_verify="fix", overwrite=True)
    hduLst.close()
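
A sketch of a call (names are hypothetical; the complex FDF and clean-component cubes, the iteration counts, and a template header are assumed to come from the preceding RM-synthesis / RM-CLEAN step, and ``pf`` is the ``astropy.io.fits`` alias used above):

headtemp = pf.getheader('dirty_FDF.fits')    # hypothetical template file
writefits(cleanFDF, ccArr, iterCountArr, residFDF, headtemp,
          prefixOut='src1_', outDir='products', write_separate_FDF=True)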
Example #8
def kepimages(infile,
              prefix,
              imtype='FLUX',
              ranges='0,0',
              overwrite=True,
              verbose=True,
              logfile='kepimages.log'):
    """
    kepimages -- create a series of separate FITS image files from a Target
    Pixel File

    ``kepimages`` will create a series of FITS image files which copy the
    images stored within a Target Pixel File (TPF). One FITS image file will be
    created for each Kepler exposure. The user can request all images within a
    TPF or a subset by supplying a series of Barycentric Julian Date ranges.
    FITS keywords from the primary and first extensions of the TPF are
    propagated into the output image files. The mid-time of each exposure
    (MIDTIME), Barycentric time correction (BARYCORR), cadence number
    (CADENCEN) and data quality flag (QUALITY) are exported to the FITS images
    as keywords. The position of the target during each observation, relative
    to the mid-time of the quarter, in both pixel column and row directions are
    recorded in keywords POSCORR1 and POSCORR2. If a cosmic ray event was
    recorded during an individual exposure then the keyword COSM_RAY is flagged
    as true. The keywords TELAPSE, LIVETIME and EXPOSURE are overwritten and
    refer to the individual exposure times of each image.

    The user can choose from six different images to extract:

    * ``RAW_CNTS`` -- uncalibrated pixel count values
    * ``FLUX`` -- calibrated pixel, background-subtracted, cosmic ray-removed
      fluxes in units of :math:`e^{-}/s`
    * ``FLUX_ERR`` -- 1-:math:`\sigma` errors on the FLUX image, as
      propagated through the pixel calibration
    * ``FLUX_BKG`` -- the background that has been subtracted from the FLUX
      image in units of :math:`e^{-}/s`
    * ``FLUX_BKG_ERR`` -- 1-:math:`\sigma` errors on the FLUX_BKG image, as
      propagated through the pixel calibration
    * ``COSMIC_RAYS`` -- the cosmic ray map that has been subtracted from the
      FLUX image in units of :math:`e^{-}/s`

    We recommend the tools `ds9 <http://ds9.si.edu/>`_ and
    `fv <https://heasarc.gsfc.nasa.gov/ftools/fv/>`_ for the inspection of the
    FITS image products. The intent of the kepimages tool is to convert the
    TPF content into a form from which images can be imported into the array
    of public photometry software available to the K2 and Kepler communities.

    Parameters
    ----------
    infile : str
        Filename for the input Target Pixel File.
    prefix : str
        Prefix for the filenames of the output FITS images. Individual
        filenames will be prefix_BJDddddddd.dddd.fits, where
        ddddddd.dddd is the mid-time of the exposure in units of BJD.
    imtype : str
        The type of image to extract; one of the six options listed above.
    ranges : str
        The user can choose here specific time ranges of exposures from which
        to export images. Time ranges are supplied as comma-separated pairs of
        Barycentric Julian Dates (BJDs). Multiple ranges are separated by a
        semi-colon. An example containing two time ranges is::

            '2455641.658,2455641.740;2455671.658,2455672.740'
    overwrite : bool
        Overwrite the output file? if overwrite is False and an existing file has
        the same name as outfile then the task will stop with an error.
    verbose : bool
        Print informative messages and warnings to the shell and logfile?
    logfile : str
        Name of the logfile containing error and warning messages.

    Examples
    --------
    .. code-block:: bash

        $ kepimages ktwo202073445-c00_lpd-targ.fits ktwo202073445-c00 --verbose
    """

    # log the call
    hashline = '--------------------------------------------------------------'
    kepmsg.log(logfile, hashline, verbose)
    call = ('KEPIMAGES -- ' + ' infile={}'.format(infile) +
            ' prefix={}'.format(prefix) + ' imtype={}'.format(imtype) +
            ' ranges={}'.format(ranges) + ' overwrite={}'.format(overwrite) +
            ' verbose={}'.format(verbose) + ' logfile={}'.format(logfile))
    kepmsg.log(logfile, call + '\n', verbose)

    kepmsg.clock('KEPIMAGES started at', logfile, verbose)

    # open input file
    print(' ')
    instr = pyfits.open(infile, mode='readonly', memmap=True)
    cards0 = instr[0].header.cards
    cards1 = instr[1].header.cards
    cards2 = instr[2].header.cards

    # fudge non-compliant FITS keywords with no values
    instr = kepkey.emptykeys(instr, infile, logfile, verbose)

    # ingest time series data
    time = instr[1].data.field('TIME')[:] + 2454833.0
    timecorr = instr[1].data.field('TIMECORR')[:]
    cadenceno = instr[1].data.field('CADENCENO')[:]
    raw_cnts = instr[1].data.field('RAW_CNTS')[:]
    flux = instr[1].data.field('FLUX')[:]
    flux_err = instr[1].data.field('FLUX_ERR')[:]
    flux_bkg = instr[1].data.field('FLUX_BKG')[:]
    flux_bkg_err = instr[1].data.field('FLUX_BKG_ERR')[:]
    cosmic_rays = instr[1].data.field('COSMIC_RAYS')[:]
    quality = instr[1].data.field('QUALITY')[:]
    pos_corr1 = instr[1].data.field('POS_CORR1')[:]
    pos_corr2 = instr[1].data.field('POS_CORR2')[:]

    # choose output image
    if imtype.lower() == 'raw_cnts':
        outim = raw_cnts
    elif imtype.lower() == 'flux_err':
        outim = flux_err
    elif imtype.lower() == 'flux_bkg':
        outim = flux_bkg
    elif imtype.lower() == 'flux_bkg_err':
        outim = flux_bkg_err
    elif imtype.lower() == 'cosmic_rays':
        outim = cosmic_rays
    else:
        outim = flux

    # identify images to be exported
    tim = np.array([])
    dat = np.array([])
    err = np.array([])
    tstart, tstop = kepio.timeranges(ranges, logfile, verbose)
    cadencelis = kepstat.filterOnRange(time, tstart, tstop)

    # provide name for each output file and overwrite if file exists
    for cadence in cadencelis:
        outfile = prefix + '_BJD%.4f' % time[cadence] + '.fits'
        if overwrite:
            kepio.overwrite(outfile, logfile, verbose)
        if kepio.fileexists(outfile):
            errmsg = ('ERROR -- KEPIMAGES: {} exists. Use --overwrite'.format(
                outfile))
            kepmsg.err(logfile, errmsg, True)

    # construct output primary extension
    ncad = 0
    for cadence in tqdm(cadencelis):
        outfile = prefix + '_BJD%.4f' % time[cadence] + '.fits'
        hdu0 = pyfits.PrimaryHDU()
        for i in range(len(cards0)):
            try:
                if cards0[i].keyword not in hdu0.header.keys():
                    hdu0.header[cards0[i].keyword] = (cards0[i].value,
                                                      cards0[i].comment)
                else:
                    hdu0.header.cards[
                        cards0[i].keyword].comment = cards0[i].comment
            except:
                pass
        kepkey.history(call, hdu0, outfile, logfile, verbose)
        outstr = pyfits.HDUList(hdu0)

        # construct output image extension
        hdu1 = pyfits.ImageHDU(flux[cadence])
        for i in range(len(cards2)):
            try:
                if cards2[i].keyword not in hdu1.header.keys():
                    hdu1.header[cards2[i].keyword] = (cards2[i].value,
                                                      cards2[i].comment)
            except:
                pass
        for i in range(len(cards1)):
            if (cards1[i].keyword not in hdu1.header.keys()
                    and cards1[i].keyword[:4] not in [
                        'TTYP', 'TFOR', 'TUNI', 'TDIS', 'TDIM', 'WCAX', '1CTY',
                        '2CTY', '1CRP', '2CRP', '1CRV', '2CRV', '1CUN', '2CUN',
                        '1CDE', '2CDE', '1CDL', '2CDL', '11PC', '12PC', '21PC',
                        '22PC', 'WCSN', 'TFIE'
                    ]):
                hdu1.header[cards1[i].keyword] = (cards1[i].value,
                                                  cards1[i].comment)
        try:
            int_time = cards1['INT_TIME'].value
        except:
            kepmsg.warn(logfile,
                        'WARNING -- KEPIMAGES: cannot find INT_TIME keyword')
        try:
            frametim = cards1['FRAMETIM'].value
        except:
            kepmsg.warn(logfile,
                        'WARNING -- KEPIMAGES: cannot find FRAMETIM keyword')
        try:
            num_frm = cards1['NUM_FRM'].value
        except:
            kepmsg.warn(logfile,
                        'WARNING -- KEPIMAGES: cannot find NUM_FRM keyword')

        hdu1.header['EXTNAME'] = ('IMAGE', 'name of extension')

        try:
            hdu1.header['TELAPSE'] = (frametim * num_frm,
                                      '[s] elapsed time for exposure')
        except:
            hdu1.header['TELAPSE'] = (-999, '[s] elapsed time for exposure')
        try:
            hdu1.header['LIVETIME'] = (int_time * num_frm,
                                       '[s] TELAPSE multiplied by DEADC')
        except:
            hdu1.header['LIVETIME'] = (-999, '[s] TELAPSE multiplied by DEADC')
        try:
            hdu1.header['EXPOSURE'] = (int_time * num_frm,
                                       '[s] time on source')
        except:
            hdu1.header['EXPOSURE'] = (-999, '[s] time on source')
        try:
            hdu1.header['MIDTIME'] = (time[cadence],
                                      '[BJD] mid-time of exposure')
        except:
            hdu1.header['MIDTIME'] = (-999, '[BJD] mid-time of exposure')
        try:
            hdu1.header['TIMECORR'] = (timecorr[cadence],
                                       '[d] barycenter - timeslice correction')
        except:
            hdu1.header['TIMECORR'] = (-999,
                                       '[d] barycenter - timeslice correction')
        try:
            hdu1.header['CADENCEN'] = (cadenceno[cadence],
                                       'unique cadence number')
        except:
            hdu1.header['CADENCEN'] = (-999, 'unique cadence number')
        try:
            hdu1.header['QUALITY'] = (quality[cadence], 'pixel quality flag')
        except:
            hdu1.header['QUALITY'] = (-999, 'pixel quality flag')
        try:
            if True in np.isfinite(cosmic_rays[cadence]):
                hdu1.header['COSM_RAY'] = (True, 'cosmic ray detected?')
            else:
                hdu1.header['COSM_RAY'] = (False, 'cosmic ray detected?')
        except:
            hdu1.header['COSM_RAY'] = (-999, 'cosmic ray detected?')
        try:
            pc1 = str(pos_corr1[cadence])
            pc2 = str(pos_corr2[cadence])
            hdu1.header['POSCORR1'] = (pc1, '[pix] column position correction')
            hdu1.header['POSCORR2'] = (pc2, '[pix] row position correction')
        except:
            hdu1.header['POSCORR1'] = (-999,
                                       '[pix] column position correction')
            hdu1.header['POSCORR2'] = (-999, '[pix] row position correction')
        outstr.append(hdu1)

        # write output file
        outstr.writeto(outfile, checksum=True)
        ncad += 1
        txt = '\r%3d%% ' % (float(ncad) / float(len(cadencelis)) * 100.0)
        txt += '%s ' % outfile
        sys.stdout.write(txt)
        sys.stdout.flush()

    # close input structure
    instr.close()
    print('\n')

    # end time
    kepmsg.clock('KEPIMAGES finished at', logfile, verbose)
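
The docstring shows the command-line form; called from Python it might look like this (file names are hypothetical):

kepimages('ktwo202073445-c00_lpd-targ.fits', 'ktwo202073445-c00',
          imtype='FLUX', ranges='2455641.658,2455641.740', overwrite=True)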
Example #9
def make_subject_fits(
        full_subject_set_id,
        center_workflow_id,
        spiral_workflow_id,
        bar_workflow_id,
        dimensions=[525, 525],
        image_location='/Volumes/SD_Extra/manga_images_production/MPL5',
        output='MPL5_fits'):
    blank_mask = np.zeros(dimensions)
    coords = [[x, y] for y in range(dimensions[1])
              for x in range(dimensions[0])]
    fullsample = SubjectSet.find(full_subject_set_id)
    subjects = fullsample.subjects()
    pbar = pb.ProgressBar(widgets=widgets, maxval=subjects.meta['count'])
    pbar.start()
    idx = 0
    for subject in subjects:
        subject_metadata = Table(dtype=metadata_dtype)
        subject_metadata.add_row(
            tuple(subject.raw['metadata'][key]
                  for key in subject_metadata.dtype.names))
        subject_metadata.rename_column('#MANGA_TILEID', 'MANGA_TILEID')
        subject_metadata_hdu = fits.table_to_hdu(subject_metadata)
        loc = '{0}/{1}_{2}.jpg'.format(
            image_location, subject_metadata['MANGAID'][0],
            int(subject_metadata['IFUDESIGNSIZE'][0]))
        output_name = '{0}/{1}_{2}_{3}.fits'.format(
            output, subject_metadata['MANGAID'][0],
            int(subject_metadata['IFUDESIGNSIZE'][0]), subject.id)
        if os.path.isfile('{0}.gz'.format(output_name)):
            # don't process the file if it already exists
            idx += 1
            pbar.update(idx)
            continue
        image = plt.imread(loc, format='jpeg')
        wcs = define_wcs(subject_metadata['ra'][0], subject_metadata['dec'][0])
        wcs_header = wcs.to_header()
        orig_image_hdu = fits.PrimaryHDU(data=image, header=wcs_header)
        # process data from center(s) and star(s) points
        center_classifications = make_classification_table(
            'center_points', 'star_points')
        all_center = []
        all_star = []
        for c in Classification.where(scope='project',
                                      workflow_id=center_workflow_id,
                                      subject_id=subject.id):
            record_base_classification(c, center_classifications)
            center_points = []
            star_points = []
            points = c.raw['annotations'][0]['value']
            for p in points:
                if ('x' in p) and ('y' in p):
                    if p['tool'] == 0:
                        # somehow the workflow_id got messed up for some classifications
                        # so a try statement is needed
                        loc = [p['x'], p['y']]
                        center_points.append(loc)
                        all_center.append(loc)
                    elif p['tool'] == 1:
                        loc = [p['x'], p['y']]
                        star_points.append(loc)
                        all_star.append(loc)
            center_classifications['center_points'].append(
                json.dumps(center_points))
            center_classifications['star_points'].append(
                json.dumps(star_points))
        center_star_table_hdu = fits.table_to_hdu(
            Table(center_classifications))
        # cluster points and make image masks
        if len(all_center):
            center_mask, center_table_hdu = cluster(np.array(all_center),
                                                    dimensions, coords, wcs)
        else:
            center_mask = blank_mask
            center_table_hdu = fits.table_to_hdu(Table(make_cluster_table()))
        center_hdu = fits.ImageHDU(data=center_mask, header=wcs_header)
        if len(all_star):
            star_mask, star_table_hdu = cluster(np.array(all_star), dimensions,
                                                coords, wcs)
        else:
            star_mask = blank_mask
            star_table_hdu = fits.table_to_hdu(Table(make_cluster_table()))
        star_hdu = fits.ImageHDU(data=star_mask, header=wcs_header)
        # spiral arms
        spiral_table_hdu, spiral_hdu = mask_process(spiral_workflow_id,
                                                    subject.id, 'spiral_paths',
                                                    wcs_header, dimensions,
                                                    coords)
        # bars
        bar_table_hdu, bar_hdu = mask_process(bar_workflow_id, subject.id,
                                              'bar_paths', wcs_header,
                                              dimensions, coords)
        # make fits file
        hdu_list = fits.HDUList([
            orig_image_hdu, center_hdu, star_hdu, spiral_hdu, bar_hdu,
            subject_metadata_hdu, center_table_hdu, star_table_hdu,
            center_star_table_hdu, spiral_table_hdu, bar_table_hdu
        ])
        hdu_list.writeto(output_name)
        # compress the fits file
        call(['gzip', output_name])
        # update progressbar
        idx += 1
        pbar.update(idx)
    pbar.finish()
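
A hedged sketch of a call (all IDs are hypothetical; this requires an authenticated panoptes_client session plus the helper functions referenced above):

make_subject_fits(4567, 1111, 2222, 3333,
                  image_location='/Volumes/SD_Extra/manga_images_production/MPL5',
                  output='MPL5_fits')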
Example #10
def test_read_oldcalfits():
    """
    Test for proper behavior with old calfits files.
    """
    # start with gain type files
    cal_in = UVCal()
    cal_out = UVCal()
    testfile = os.path.join(DATA_PATH, 'zen.2457698.40355.xx.fitsA')
    write_file = os.path.join(DATA_PATH, 'test/outtest_omnical.fits')
    message = testfile + ' appears to be an old calfits format which'
    uvtest.checkWarnings(cal_in.read_calfits, [testfile], message=message)

    # add total_quality_array so that can be tested as well
    cal_in.total_quality_array = np.zeros(
        cal_in._total_quality_array.expected_shape(cal_in))

    # now read in the file and remove various CRPIX and CRVAL keywords to
    # emulate old calfits files
    header_vals_to_remove = [{
        'primary': 'CRVAL5'
    }, {
        'primary': 'CRPIX4'
    }, {
        'totqual': 'CRVAL4'
    }]
    messages = [write_file, 'This file', write_file]
    messages = [m + ' appears to be an old calfits format' for m in messages]
    for i, hdr_dict in enumerate(header_vals_to_remove):
        cal_in.write_calfits(write_file, clobber=True)

        unit = list(hdr_dict.keys())[0]
        keyword = hdr_dict[unit]

        F = fits.open(write_file)
        data = F[0].data
        primary_hdr = F[0].header
        hdunames = uvutils.fits_indexhdus(F)
        ant_hdu = F[hdunames['ANTENNAS']]
        totqualhdu = F[hdunames['TOTQLTY']]
        totqualhdr = totqualhdu.header

        if unit == 'primary':
            primary_hdr.pop(keyword)
        elif unit == 'totqual':
            totqualhdr.pop(keyword)

        prihdu = fits.PrimaryHDU(data=data, header=primary_hdr)
        hdulist = fits.HDUList([prihdu, ant_hdu])
        totqualhdu = fits.ImageHDU(data=totqualhdu.data, header=totqualhdr)
        hdulist.append(totqualhdu)

        if float(astropy.__version__[0:3]) < 1.3:
            hdulist.writeto(write_file, clobber=True)
        else:
            hdulist.writeto(write_file, overwrite=True)

        uvtest.checkWarnings(cal_out.read_calfits, [write_file],
                             message=messages[i])
        nt.assert_equal(cal_in, cal_out)
        nt.assert_raises(KeyError,
                         cal_out.read_calfits,
                         write_file,
                         strict_fits=True)

    # now with delay type files
    cal_in = UVCal()
    cal_out = UVCal()
    testfile = os.path.join(DATA_PATH, 'zen.2457698.40355.xx.HH.uvc.fits')
    write_file = os.path.join(DATA_PATH, 'test/outtest_firstcal.fits')
    message = [
        testfile + ' appears to be an old calfits format which',
        testfile + ' appears to be an old calfits format for delay files'
    ]
    uvtest.checkWarnings(cal_in.read_calfits, [testfile],
                         message=message,
                         nwarnings=2)

    # add total_quality_array so that can be tested as well
    cal_in.total_quality_array = np.zeros(
        cal_in._total_quality_array.expected_shape(cal_in))

    # now read in the file and remove various CRPIX and CRVAL keywords to
    # emulate old calfits files
    header_vals_to_remove = [{
        'primary': 'CRVAL5'
    }, {
        'flag': 'CRVAL5'
    }, {
        'flag': 'CRPIX4'
    }, {
        'totqual': 'CRVAL4'
    }]
    messages = [write_file, 'This file', 'This file', write_file]
    messages = [m + ' appears to be an old calfits format' for m in messages]
    for i, hdr_dict in enumerate(header_vals_to_remove):
        cal_in.write_calfits(write_file, clobber=True)

        unit = list(hdr_dict.keys())[0]
        keyword = hdr_dict[unit]

        F = fits.open(write_file)
        data = F[0].data
        primary_hdr = F[0].header
        hdunames = uvutils.fits_indexhdus(F)
        ant_hdu = F[hdunames['ANTENNAS']]
        flag_hdu = F[hdunames['FLAGS']]
        flag_hdr = flag_hdu.header
        totqualhdu = F[hdunames['TOTQLTY']]
        totqualhdr = totqualhdu.header

        if unit == 'primary':
            primary_hdr.pop(keyword)
        elif unit == 'flag':
            flag_hdr.pop(keyword)
        elif unit == 'totqual':
            totqualhdr.pop(keyword)

        prihdu = fits.PrimaryHDU(data=data, header=primary_hdr)
        hdulist = fits.HDUList([prihdu, ant_hdu])
        flag_hdu = fits.ImageHDU(data=flag_hdu.data, header=flag_hdr)
        hdulist.append(flag_hdu)
        totqualhdu = fits.ImageHDU(data=totqualhdu.data, header=totqualhdr)
        hdulist.append(totqualhdu)

        if float(astropy.__version__[0:3]) < 1.3:
            hdulist.writeto(write_file, clobber=True)
        else:
            hdulist.writeto(write_file, overwrite=True)

        uvtest.checkWarnings(cal_out.read_calfits, [write_file],
                             message=messages[i])
        nt.assert_equal(cal_in, cal_out)
        nt.assert_raises(KeyError,
                         cal_out.read_calfits,
                         write_file,
                         strict_fits=True)
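Both loops above follow the same recipe: write a fresh calfits file, strip one WCS-style keyword, and rewrite the file. A stripped-down sketch of that recipe (filenames are placeholders):

from astropy.io import fits

with fits.open('cal.fits') as hdul:             # placeholder input file
    hdul[0].header.pop('CRVAL5', None)          # drop the keyword if present
    hdul.writeto('cal_old_style.fits', overwrite=True)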
Example #11
0
def test_errors():
    """
    Test for various errors.

    """
    cal_in = UVCal()
    cal_out = UVCal()
    testfile = os.path.join(DATA_PATH, 'zen.2457698.40355.xx.HH.uvc.fits')
    write_file = os.path.join(DATA_PATH, 'test/outtest_firstcal.fits')
    message = [
        testfile + ' appears to be an old calfits format which',
        testfile + ' appears to be an old calfits format for delay files'
    ]
    uvtest.checkWarnings(cal_in.read_calfits, [testfile],
                         message=message,
                         nwarnings=2)

    cal_in.set_unknown_cal_type()
    nt.assert_raises(ValueError,
                     cal_in.write_calfits,
                     write_file,
                     run_check=False,
                     clobber=True)

    # change values for various axes in flag and total quality hdus to not match primary hdu
    uvtest.checkWarnings(cal_in.read_calfits, [testfile],
                         message=message,
                         nwarnings=2)
    # Create filler jones info
    cal_in.jones_array = np.array([-5, -6, -7, -8])
    cal_in.Njones = 4
    cal_in.flag_array = np.zeros(cal_in._flag_array.expected_shape(cal_in),
                                 dtype=bool)
    cal_in.delay_array = np.ones(cal_in._delay_array.expected_shape(cal_in),
                                 dtype=np.float64)
    cal_in.quality_array = np.zeros(
        cal_in._quality_array.expected_shape(cal_in))

    # add total_quality_array so that can be tested as well
    cal_in.total_quality_array = np.zeros(
        cal_in._total_quality_array.expected_shape(cal_in))

    header_vals_to_double = [{
        'flag': 'CDELT2'
    }, {
        'flag': 'CDELT3'
    }, {
        'flag': 'CRVAL5'
    }, {
        'totqual': 'CDELT1'
    }, {
        'totqual': 'CDELT2'
    }, {
        'totqual': 'CRVAL4'
    }]
    for i, hdr_dict in enumerate(header_vals_to_double):
        cal_in.write_calfits(write_file, clobber=True)

        unit = list(hdr_dict.keys())[0]
        keyword = hdr_dict[unit]

        F = fits.open(write_file)
        data = F[0].data
        primary_hdr = F[0].header
        hdunames = uvutils.fits_indexhdus(F)
        ant_hdu = F[hdunames['ANTENNAS']]
        flag_hdu = F[hdunames['FLAGS']]
        flag_hdr = flag_hdu.header
        totqualhdu = F[hdunames['TOTQLTY']]
        totqualhdr = totqualhdu.header

        if unit == 'flag':
            flag_hdr[keyword] *= 2
        elif unit == 'totqual':
            totqualhdr[keyword] *= 2

        prihdu = fits.PrimaryHDU(data=data, header=primary_hdr)
        hdulist = fits.HDUList([prihdu, ant_hdu])
        flag_hdu = fits.ImageHDU(data=flag_hdu.data, header=flag_hdr)
        hdulist.append(flag_hdu)
        totqualhdu = fits.ImageHDU(data=totqualhdu.data, header=totqualhdr)
        hdulist.append(totqualhdu)

        if float(astropy.__version__[0:3]) < 1.3:
            hdulist.writeto(write_file, clobber=True)
        else:
            hdulist.writeto(write_file, overwrite=True)

        nt.assert_raises(ValueError,
                         cal_out.read_calfits,
                         write_file,
                         strict_fits=True)

    # repeat for gain type file
    testfile = os.path.join(DATA_PATH, 'zen.2457698.40355.xx.fitsA')
    write_file = os.path.join(DATA_PATH, 'test/outtest_omnical.fits')
    message = testfile + ' appears to be an old calfits format which'
    uvtest.checkWarnings(cal_in.read_calfits, [testfile], message=message)

    # Create filler jones info
    cal_in.jones_array = np.array([-5, -6, -7, -8])
    cal_in.Njones = 4
    cal_in.flag_array = np.zeros(cal_in._flag_array.expected_shape(cal_in),
                                 dtype=bool)
    cal_in.gain_array = np.ones(cal_in._gain_array.expected_shape(cal_in),
                                dtype=np.complex64)
    cal_in.quality_array = np.zeros(
        cal_in._quality_array.expected_shape(cal_in))

    # add total_quality_array so that can be tested as well
    cal_in.total_quality_array = np.zeros(
        cal_in._total_quality_array.expected_shape(cal_in))

    header_vals_to_double = [{
        'totqual': 'CDELT1'
    }, {
        'totqual': 'CDELT2'
    }, {
        'totqual': 'CDELT3'
    }, {
        'totqual': 'CRVAL4'
    }]

    for i, hdr_dict in enumerate(header_vals_to_double):
        cal_in.write_calfits(write_file, clobber=True)

        unit = list(hdr_dict.keys())[0]
        keyword = hdr_dict[unit]

        F = fits.open(write_file)
        data = F[0].data
        primary_hdr = F[0].header
        hdunames = uvutils.fits_indexhdus(F)
        ant_hdu = F[hdunames['ANTENNAS']]
        totqualhdu = F[hdunames['TOTQLTY']]
        totqualhdr = totqualhdu.header

        if unit == 'totqual':
            totqualhdr[keyword] *= 2

        prihdu = fits.PrimaryHDU(data=data, header=primary_hdr)
        hdulist = fits.HDUList([prihdu, ant_hdu])
        totqualhdu = fits.ImageHDU(data=totqualhdu.data, header=totqualhdr)
        hdulist.append(totqualhdu)

        if float(astropy.__version__[0:3]) < 1.3:
            hdulist.writeto(write_file, clobber=True)
        else:
            hdulist.writeto(write_file, overwrite=True)

        nt.assert_raises(ValueError,
                         cal_out.read_calfits,
                         write_file,
                         strict_fits=True)
Example #12
0
array1 = np.ones((3))
array2 = 2.0 * np.ones((3))
array3 = 3.0 * np.ones((3))
array4 = 4.0 * np.ones((3))
array5 = 5.0 * np.ones((3))
array6 = 6.0 * np.ones((3))
array7 = 7.0 * np.ones((3))
array8 = 8.0 * np.ones((3))

# fits.writeto(simulator=array1, filename=path + "3_ones.fits", overwrite=True)
# fits.writeto(simulator=array2, filename=path + "3_twos.fits")
# fits.writeto(simulator=array3, filename=path + "3_threes.fits")
# fits.writeto(simulator=array4, filename=path + "3_fours.fits")
# fits.writeto(simulator=array5, filename=path + "3_fives.fits")
# fits.writeto(simulator=array6, filename=path + "3_sixes.fits")
# fits.writeto(simulator=array7, filename=path + "3_sevens.fits")
# fits.writeto(simulator=array8, filename=path + "3_eights.fits")

new_hdul = fits.HDUList()
new_hdul.append(fits.ImageHDU(array1))
new_hdul.append(fits.ImageHDU(array2))
new_hdul.append(fits.ImageHDU(array3))
new_hdul.append(fits.ImageHDU(array4))
new_hdul.append(fits.ImageHDU(array5))
new_hdul.append(fits.ImageHDU(array6))
new_hdul.append(fits.ImageHDU(array7))
new_hdul.append(fits.ImageHDU(array8))

new_hdul.writeto(path + "3_multiple_hdu.fits")
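Reading the file back shows how the eight arrays land in extensions (a sketch; ``HDUList.writeto`` prepends a default PRIMARY HDU when the first HDU is not a primary):

with fits.open(path + "3_multiple_hdu.fits") as hdul:
    hdul.info()              # PRIMARY plus eight IMAGE extensions
    threes = hdul[3].data    # array3: extensions are indexed from 1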
Example #13
0
def gen_atmos(plot=False):
    """
    Generates atmospheric phase distortions using hcipy (updated from the
    original CAOS-based implementation).

    Read more on hcipy here: https://hcipy.readthedocs.io/en/latest/index.html
    In hcipy, the atmosphere evolves as a function of time, specified by the
    user. The user can thus set the timescale of evolution through both the
    layer velocity and the time per step in the obs_sequence, in the loop of
    medis_main.gen_timeseries().

    :param plot: turn plotting on or off
    :return: None; phase maps are written to FITS files
    """
    dprint("Making New Atmosphere Model")
    # Saving Parameters
    # np.savetxt(iop.atmosconfig, ['Grid Size', 'Wvl Range', 'Number of Frames', 'Layer Strength', 'Outer Scale', 'Velocity', 'Scale Height', cp.model])
    # np.savetxt(iop.atmosconfig, ['ap.grid_size', 'ap.wvl_range', 'ap.numframes', 'atmp.cn_sq', 'atmp.L0', 'atmp.vel', 'atmp.h', 'cp.model'])
    # np.savetxt(iop.atmosconfig, [ap.grid_size, ap.wvl_range, ap.numframes, atmp.cn_sq, atmp.L0, atmp.vel, atmp.h, cp.model], fmt='%s')

    wsamples = np.linspace(ap.wvl_range[0], ap.wvl_range[1], ap.n_wvl_init)
    wavefronts = []

    ##################################
    # Initiate HCIpy Atmosphere Type
    ##################################
    pupil_grid = hcipy.make_pupil_grid(sp.grid_size, tp.entrance_d)
    if atmp.model == 'single':
        layers = [
            hcipy.InfiniteAtmosphericLayer(pupil_grid, atmp.cn_sq, atmp.L0,
                                           atmp.vel, atmp.h, 2)
        ]
    elif atmp.model == 'hcipy_standard':
        # Make multi-layer atmosphere
        layers = hcipy.make_standard_atmospheric_layers(
            pupil_grid, atmp.outer_scale)
    elif atmp.model == 'evolving':
        raise NotImplementedError
    atmos = hcipy.MultiLayerAtmosphere(layers, scintillation=False)

    for wavelength in wsamples:
        wavefronts.append(
            hcipy.Wavefront(hcipy.Field(np.ones(pupil_grid.size), pupil_grid),
                            wavelength))

    ###########################################
    # Evolving Wavefront using HCIpy tools
    ###########################################
    for it, t in enumerate(
            np.arange(0, sp.numframes * sp.sample_time, sp.sample_time)):
        atmos.evolve_until(t)
        for iw, wf in enumerate(wavefronts):
            wf2 = atmos.forward(wf)

            filename = get_filename(it, wsamples[iw])
            dprint(f"atmos file = {filename}")
            hdu = fits.ImageHDU(wf2.phase.reshape(sp.grid_size, sp.grid_size))
            hdu.header['PIXSIZE'] = tp.entrance_d / sp.grid_size
            hdu.writeto(filename, overwrite=True)

            if plot and iw == 0:
                import matplotlib.pyplot as plt
                from medis.twilight_colormaps import sunlight
                plt.figure()
                plt.title(
                    f"Atmosphere Phase Map t={t} lambda={eformat(wsamples[iw], 3, 2)}"
                )
                hcipy.imshow_field(wf2.phase, cmap=sunlight)
                plt.colorbar()
                plt.show(block=True)
Example #14
0
    def get_amplifier_hdu(self, amp_name, compress=True):
        """
        Get an astropy.io.fits.HDU for the specified amplifier.

        Parameters
        ----------
        amp_name: str
            The amplifier name, e.g., "R22_S11_C00".
        compress: bool [True]
            Use RICE_1 compression.

        Returns
        -------
        astropy.io.fits.ImageHDU
            Image HDU with the pixel data and header keywords
            appropriate for the requested sensor segment.
        """
        data = self.amp_images[amp_name].getArray().astype(np.int32)
        if compress:
            hdu = fits.CompImageHDU(data=data, compression_type='RICE_1')
        else:
            hdu = fits.ImageHDU(data=data)
        hdr = hdu.header
        amp_info = self.camera_info.get_amp_info(amp_name)
        # Copy keywords from eimage primary header.
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            for key in self.eimage[0].header.keys():
                if key in ('BITPIX', 'NAXIS'):
                    continue
                try:
                    hdr[key] = self.eimage[0].header[key]
                except ValueError:
                    # eimages produced by phosim contain non-ASCII or
                    # non-printable characters resulting in a ValueError.
                    self.logger.warning(
                        "ValueError raised while attempting to "
                        "read %s from eimage header", key)

        # Transpose the WCS matrix elements to account for the use of the
        # Camera Coordinate System in the eimage.  These changes
        # neglect any implied changes in the SIP coefficients.
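        # Map the two-character channel suffix of amp_name to an amplifier
        # number in 0-15.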
        channels = '10 11 12 13 14 15 16 17 07 06 05 04 03 02 01 00'.split()
        amp_nums = dict(zip(channels, range(16)))
        amp_num = amp_nums[amp_name[-2:]]
        # These keywords seem to give approximately correct per amp
        # WCS's when viewed with ds9.
        x_pos = (list(range(1, 9)) + list(range(8, 0, -1)))[amp_num]
        hdr['CRPIX1'], hdr['CRPIX2'] \
            = (hdr['CRPIX2'] - amp_info.getRawDataBBox().getWidth()*(8 - x_pos),
               hdr['CRPIX1'])
        if amp_num < 8:
            hdr['CD1_1'], hdr['CD1_2'] = -hdr['CD1_2'], hdr['CD1_1']
            hdr['CD2_1'], hdr['CD2_2'] = -hdr['CD2_2'], hdr['CD2_1']
        else:
            hdr['CD1_1'], hdr['CD1_2'] = -hdr['CD1_2'], -hdr['CD1_1']
            hdr['CD2_1'], hdr['CD2_2'] = -hdr['CD2_2'], -hdr['CD2_1']

        # Set NOAO geometry keywords.
        hdr['DATASEC'] = self._noao_section_keyword(amp_info.getRawDataBBox())
        hdr['DETSEC'] = \
            self._noao_section_keyword(self.camera_info.mosaic_section(amp_info),
                                       flipx=amp_info.getRawFlipX(),
                                       flipy=amp_info.getRawFlipY())
        hdr['GAIN'] = amp_info.getGain()

        return hdu
Example #15
0
    def __init__(self, header, **kwargs):
        image = np.zeros((header["NAXIS2"], header["NAXIS1"]))
        self._hdu = fits.ImageHDU(header=header, data=image)
        self.meta = {}
        self.meta.update(header)
        self.meta.update(kwargs)
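A standalone sketch of the same idea, sizing a blank image HDU from header keywords (note the (NAXIS2, NAXIS1) row/column order):

import numpy as np
from astropy.io import fits

header = fits.Header()
header['NAXIS1'] = 64                     # image width (columns)
header['NAXIS2'] = 32                     # image height (rows)
image = np.zeros((header['NAXIS2'], header['NAXIS1']))
hdu = fits.ImageHDU(header=header, data=image)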
Example #16
0
    def _subtract_background(self):
        """
        Make the background subtraction
        """
        # go over the object drizzle list
        for index in range(len(self.obj_dol)):

            # take and compose the filenames
            obj_img = self.obj_dol[index].ext_names['MEF']
            bck_img = self.bck_dol[index].ext_names['MEF']

            # make sure the ID's of object and background match
            if self.obj_dol[index].objID != self.bck_dol[index].objID:
                err_msg = ("The object ID: {0:s} and background ID {1:s} are"
                           " not identical!".format(self.obj_dol[index].objID,
                                                    self.bck_dol[index].objID))
                raise aXeError(err_msg)

            # open the fits file
            obj_fits = fits.open(obj_img, 'update')
            bck_fits = fits.open(bck_img, 'readonly')

            # compose an image HDU for the background
            # and the background error
            bck_sci = fits.ImageHDU(data=bck_fits['SCI'].data,
                                    header=bck_fits['SCI'].header,
                                    name='SCIBCK')
            bck_err = fits.ImageHDU(data=bck_fits['ERR'].data,
                                    header=bck_fits['ERR'].header,
                                    name='ERRBCK')

            # subtract the background;
            # process the error
            obj_fits['SCI'].data = obj_fits['SCI'].data - bck_sci.data
            obj_fits['ERR'].data = np.sqrt(obj_fits['ERR'].data *
                                           obj_fits['ERR'].data +
                                           bck_err.data * bck_err.data)

            # manifest in header
            hist_string1 = 'The extensions SCIBCK and ERRBCK were used for '
            hist_string2 = 'background and background error'
            obj_fits['SCI'].header['SCIBCK'] = ('DONE', "subtraction of "
                                                "background stamp")
            obj_fits['SCI'].header['ERRBCK'] = ('DONE', "processing of "
                                                "background stamp "
                                                "error")
            obj_fits['SCI'].header.add_history(hist_string1)
            obj_fits['SCI'].header.add_history(hist_string2)

            # append the new background
            # and the error to the object fits
            obj_fits.append(bck_sci)
            obj_fits.append(bck_err)

            # store the images
            # and delete the objects
            obj_fits.flush()
            obj_fits.close()
            bck_fits.close()
            del bck_sci
            del bck_err

        # delete all background files
        self.bck_dol.delete_files()
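The core of the loop above, reduced to a sketch (placeholder filenames; the errors combine in quadrature because the two images are independent):

import numpy as np
from astropy.io import fits

with fits.open('object.fits', mode='update') as obj, \
        fits.open('background.fits') as bck:
    obj['SCI'].data = obj['SCI'].data - bck['SCI'].data
    obj['ERR'].data = np.sqrt(obj['ERR'].data**2 + bck['ERR'].data**2)
    obj.flush()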
Example #17
0
-------------------

"""

import os

##############################################################################
# HDUList objects are used to hold all the HDUs in a FITS file. This
# ``HDUList`` class is a subclass of Python's builtin `list` and can be
# created from scratch. For example, to create a FITS file with
# three extensions:

from astropy.io import fits
new_hdul = fits.HDUList()
new_hdul.append(fits.ImageHDU())
new_hdul.append(fits.ImageHDU())

##############################################################################
# Write out the new file to disk:

new_hdul.writeto('test.fits')

##############################################################################
# Alternatively, the HDU instances can be created first (or read from an
# existing FITS file).
#
# Create a multi-extension FITS file with two empty IMAGE extensions (a
# default PRIMARY HDU is prepended automatically if one is not specified;
# we use ``overwrite=True`` to overwrite the file if it already exists):
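##############################################################################
# A sketch of the continuation the comment above describes (following the
# astropy documentation example):

hdu1 = fits.PrimaryHDU()
hdu2 = fits.ImageHDU()
new_hdul = fits.HDUList([hdu1, hdu2])
new_hdul.writeto('test.fits', overwrite=True)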
Example #18
0
    def generate_output(self):
        """
        Run the processing of the input file
        :return:
        """

        # Primary HDU with general info
        ## hdu0 = fits.PrimaryHDU()
        ## hdul = fits.HDUList([hdu0])
        ##
        ## # Then, an extension with each image, without any actual order
        ## for ccdn,img in zip(ccd_number, images):
        ##     hdu = fits.ImageHDU(img)
        ##     hdu.header['CCD_NUM'] = ccdn
        ##     hdul.append(hdu)
        ##
        ## hdul.writeto('test.fits', overwrite=True)

        if self.output_type == OutputType.LE1:
            le1file = os.path.join(self.output_dir,
                                   os.path.basename(self.input_file) + '.fits')

            hdu0 = fits.PrimaryHDU()
            hdu0.writeto(le1file, overwrite=True)

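            # 144 extensions: 36 CCDs x 4 quadrants (E, F, G, H), one
            # quadrant image per extension.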
            for extn in range(1, 145):
                (b, n, q) = self.ext2quad[extn]
                ccdn = (b - 1) * 6 + n - 1
                qlet = 'EFGH'[q - 1:q]
                img = self.images[ccdn]
                logger.debug(f'({b}, {n}, {q}) => {extn} => {ccdn + 1}.{qlet}')
                if q == 1:  # E
                    hdu = fits.ImageHDU(img[0:VISSize.ROWS_HALF,
                                            0:VISSize.COLS_HALF])
                elif q == 2:  # F
                    hdu = fits.ImageHDU(img[0:VISSize.ROWS_HALF,
                                            -1:-VISSize.COLS_HALF - 1:-1])
                elif q == 3:  # G
                    hdu = fits.ImageHDU(img[-1:-VISSize.ROWS_HALF - 1:-1,
                                            -1:VISSize.COLS_HALF - 1:-1])
                else:  # H
                    hdu = fits.ImageHDU(img[-1:-VISSize.ROWS_HALF - 1:-1,
                                            0:VISSize.COLS_HALF])
                hdu.header['EXTNAME'] = f'CCD_{b}-{n}.{q}'
                hdu.header['CCDNUM'] = ccdn
                hdu.header['QUADRANT'] = qlet
                fits.append(le1file, hdu.data, hdu.header)

        elif self.output_type == OutputType.QUAD:
            hdu0 = fits.PrimaryHDU()
            hdul = fits.HDUList([hdu0])
            for ccdn, img in zip(self.ccd_number, self.images):
                # E
                hdu = fits.ImageHDU(img[0:VISSize.ROWS_HALF,
                                        0:VISSize.COLS_HALF])
                hdu.header['CCD_NUM'] = ccdn
                hdu.header['QUADRANT'] = 'E'
                hdul.append(hdu)

                # F
                hdu = fits.ImageHDU(img[0:VISSize.ROWS_HALF,
                                        -1:-VISSize.COLS_HALF - 1:-1])
                hdu.header['CCD_NUM'] = ccdn
                hdu.header['QUADRANT'] = 'F'
                hdul.append(hdu)

                # G
                hdu = fits.ImageHDU(img[-1:-VISSize.ROWS_HALF - 1:-1,
                                        -1:VISSize.COLS_HALF - 1:-1])
                hdu.header['CCD_NUM'] = ccdn
                hdu.header['QUADRANT'] = 'G'
                hdul.append(hdu)

                # H
                hdu = fits.ImageHDU(img[-1:-VISSize.ROWS_HALF - 1:-1,
                                        0:VISSize.COLS_HALF])
                hdu.header['CCD_NUM'] = ccdn
                hdu.header['QUADRANT'] = 'H'
                hdul.append(hdu)

            hdul.writeto(os.path.join(
                self.output_dir,
                os.path.basename(self.input_file) + '.fits'),
                         overwrite=True)

        elif self.output_type == OutputType.FPA:
            hdu0 = fits.PrimaryHDU()
            hdul = fits.HDUList([hdu0])
            for ccdn, img in zip(self.ccd_number, self.images):
                hdu = fits.ImageHDU(img)
                hdu.header['CCD_NUM'] = ccdn
                hdul.append(hdu)

            hdul.writeto(os.path.join(
                self.output_dir,
                os.path.basename(self.input_file) + '.fits'),
                         overwrite=True)

        elif self.output_type == OutputType.FULL_FPA:
            logger.warning('Full-FPA output format still not supported')

        else:
            logger.error('No output will be produced')
Example #19
0
def combine_flt(files=[],
                output='exposures_cmb.fits',
                grow=1,
                add_padding=True,
                pixfrac=0.5,
                kernel='point',
                verbose=True,
                clobber=True,
                ds9=None):
    """Drizzle distorted FLT frames to an "interlaced" image
        
    Parameters
    ----------
    files : list of strings
        Filenames of FLT files to combine
    
    output : str
        Output filename of the combined file.  Convention elsewhere is to use
        an "_cmb.fits" extension to distinguish from "_flt.fits".
        
    grow : int
        Factor by which to `grow` the FLT frames to interlaced outputs.  For 
        example, `grow=2` results in 2x2 interlacing.
    
    add_padding : True
        Expand pixel grid to accommodate all dithered exposures.  WCS is 
        preserved but "CRPIX" will change.
        
    pixfrac : float
        Drizzle pixfrac (for kernels other than 'point')
    
    kernel : {'point', 'square'}
        Drizzle kernel. The 'point' kernel is effectively interlacing and is
        best for preserving the noise properties of the final combined image.
        However, can result in empty pixels given the camera distortions
        depending on the dithering of the input exposures.
    
    ds9 : `~grizli.ds9.DS9`
        Display the progress of the script to a DS9 window.
        
    verbose : bool
        Print logging information
        
    clobber : bool
        Overwrite existing files
    
    Returns
    -------
    Creates combined images
    
    """
    import numpy.linalg
    from stsci.tools import asnutil
    from drizzlepac import astrodrizzle

    ### If `files` is an ASN filename rather than a list of exposures
    if '_asn.fits' in files:
        asn = asnutil.readASNTable(files)
        files = ['{0}_flt.fits'.format(flt) for flt in asn['order']]
        if output == 'combined_flt.fits':
            output = '{0}_cmb.fits'.format(asn['output'])

    if False:
        files = glob.glob('ibhj3*flt.fits')
        files.sort()
        grism_files = files[1::2]

        ### FIGS
        info = catIO.Table('info')
        pas = np.cast[int](info['PA_V3'] * 10) / 10.
        pa_list = np.unique(pas)
        grism_files = info['FILE'][(info['FILTER'] == 'G102')
                                   & (pas == pa_list[0])]

        files = grism_files
        #utils = grizlidev.utils

    f0 = pyfits.open(files[0])
    h0 = f0[0].header.copy()
    h0['EXPTIME'] = 0.
    h0['NFILES'] = (len(files), 'Number of combined files')

    out_wcs = pywcs.WCS(f0[1].header, relax=True)
    out_wcs.pscale = utils.get_wcs_pscale(out_wcs)
    # out_wcs.pscale = np.sqrt(out_wcs.wcs.cd[0,0]**2 +
    #                          out_wcs.wcs.cd[1,0]**2)*3600.

    ### Compute maximum offset needed for padding
    if add_padding:
        ra0, de0 = out_wcs.all_pix2world([0], [0], 0)

        x0 = np.zeros(len(files))
        y0 = np.zeros(len(files))

        for i, file in enumerate(files):
            hx = pyfits.getheader(file, 0)
            h0['EXPTIME'] += hx['EXPTIME']
            h0['FILE{0:04d}'.format(i)] = (file,
                                           'Included file #{0:d}'.format(i))

            h = pyfits.getheader(file, 1)
            flt_wcs = pywcs.WCS(h, relax=True)
            x0[i], y0[i] = flt_wcs.all_world2pix(ra0, de0, 0)

        xmax = np.abs(x0).max()
        ymax = np.abs(y0).max()
        padx = 50 * int(np.ceil(xmax / 50.))
        pady = 50 * int(np.ceil(ymax / 50.))
        pad = np.maximum(padx, pady) * grow

        if verbose:
            print('Maximum shift (x, y) = ({0:6.1f}, {1:6.1f}), pad={2:d}'.
                  format(xmax, ymax, pad))
    else:
        pad = 0

    inter_wcs = out_wcs.deepcopy()
    if grow > 1:
        inter_wcs.wcs.cd /= grow
        for i in range(inter_wcs.sip.a_order + 1):
            for j in range(inter_wcs.sip.a_order + 1):
                inter_wcs.sip.a[i, j] /= grow**(i + j - 1)

        for i in range(inter_wcs.sip.b_order + 1):
            for j in range(inter_wcs.sip.b_order + 1):
                inter_wcs.sip.b[i, j] /= grow**(i + j - 1)

        inter_wcs._naxis1 *= grow
        inter_wcs._naxis2 *= grow
        inter_wcs.wcs.crpix *= grow
        inter_wcs.sip.crpix[0] *= grow
        inter_wcs.sip.crpix[1] *= grow

        if grow > 1:
            inter_wcs.wcs.crpix += grow / 2.
            inter_wcs.sip.crpix[0] += grow / 2.
            inter_wcs.sip.crpix[1] += grow / 2.

    inter_wcs._naxis1 += pad
    inter_wcs._naxis2 += pad
    inter_wcs.wcs.crpix += pad
    inter_wcs.sip.crpix[0] += pad
    inter_wcs.sip.crpix[1] += pad

    outh = inter_wcs.to_header(relax=True)
    for key in outh:
        if key.startswith('PC'):
            outh.rename_keyword(key, key.replace('PC', 'CD'))

    outh['GROW'] = grow, 'Grow factor'
    outh['PAD'] = pad, 'Image padding'
    outh['BUNIT'] = h['BUNIT']

    sh = (1014 * grow + 2 * pad, 1014 * grow + 2 * pad)
    outsci = np.zeros(sh, dtype=np.float32)
    outwht = np.zeros(sh, dtype=np.float32)
    outctx = np.zeros(sh, dtype=np.int32)

    ## Pixel area map
    # PAM_im = pyfits.open(os.path.join(os.getenv('iref'), 'ir_wfc3_map.fits'))
    # PAM = PAM_im[1].data

    for i, file in enumerate(files):
        im = pyfits.open(file)

        if verbose:
            print('{0:3d} {1:s} {2:6.1f} {3:6.1f} {4:10.2f}'.format(
                i + 1, file, x0[i], y0[i], im[0].header['EXPTIME']))

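        # Inverse-variance weights; discard flagged DQ pixels, zero-error
        # pixels, and pixels more than 3 sigma below zero.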
        dq = utils.unset_dq_bits(im['DQ'].data, okbits=608, verbose=False)
        wht = 1. / im['ERR'].data**2
        wht[(im['ERR'].data == 0) | (dq > 0) | (~np.isfinite(wht))] = 0
        wht[im['SCI'].data < -3 * im['ERR'].data] = 0

        wht = np.cast[np.float32](wht)

        exp_wcs = pywcs.WCS(im[1].header, relax=True)
        exp_wcs.pscale = utils.get_wcs_pscale(exp_wcs)

        #pf = 0.5
        # import drizzlepac.wcs_functions as dwcs
        # xx = out_wcs.deepcopy()
        # #xx.all_pix2world = xx.wcs_world2pix
        # map = dwcs.WCSMap(exp_wcs, xx)

        astrodrizzle.adrizzle.do_driz(im['SCI'].data,
                                      exp_wcs,
                                      wht,
                                      inter_wcs,
                                      outsci,
                                      outwht,
                                      outctx,
                                      1.,
                                      'cps',
                                      1,
                                      wcslin_pscale=exp_wcs.pscale,
                                      uniqid=1,
                                      pixfrac=pixfrac,
                                      kernel=kernel,
                                      fillval=0,
                                      stepsize=10,
                                      wcsmap=SIP_WCSMap)

        if ds9 is not None:
            ds9.view(outsci, header=outh)

    #outsci /= out_wcs.pscale**2
    rms = 1 / np.sqrt(outwht)
    mask = (outwht == 0) | (rms > 100)
    rms[mask] = 0
    outsci[mask] = 0.

    hdu = [pyfits.PrimaryHDU(header=h0)]
    hdu.append(pyfits.ImageHDU(data=outsci / grow**2, header=outh, name='SCI'))
    hdu.append(pyfits.ImageHDU(data=rms / grow**2, header=outh, name='ERR'))
    hdu.append(pyfits.ImageHDU(data=mask * 1024, header=outh, name='DQ'))

    pyfits.HDUList(hdu).writeto(output, clobber=clobber, output_verify='fix')
Example #20
0
    def fit_zgrid(self,
                  dz0=0.005,
                  zr=[0.4, 3.4],
                  fitter='nnls',
                  make_plot=True,
                  save_data=True,
                  prior=None,
                  templates_file='templates.npy',
                  verbose=True,
                  outlier_threshold=1e30,
                  eazyp=None,
                  ix=0,
                  order=0,
                  scale_fit=None):
        """Fit templates on a redshift grid.
        
        Parameters
        ----------
        dz0 : float
            Initial step size of the redshift grid (dz/(1+z)).
        
        zr : list
            Redshift range to consider.
        
        fitter : str
            Minimization algorithm.  Default is non-negative least-squares.
        
        make_plot : bool
            Make the diagnostic plot.
        
        prior : list
            Naive prior to add to the nominal chi-squared(z) of the template
            fits.  The example below is a simple Gaussian prior centered
            at z=1.5. 
            
                >>> z_prior = np.arange(0,3,0.001)
                >>> chi_prior = (z_prior-1.5)**2/2/0.1**2
                >>> prior = [z_prior, chi_prior]
        
        templates_file : str
            Filename of the `~numpy` save file containing the templates.  Use 
            the `make_templates` script to generate this.
            
        verbose : bool
            Print the redshift grid steps.
        
        Returns
        -------
        hdu : `~astropy.io.fits.HDUList`
            Multi-extension FITS file with the result of the redshift fits.
        
        """
        import os
        import grizli
        import matplotlib.gridspec
        import matplotlib.pyplot as plt
        import numpy as np

        t_complex, t_i = np.load(templates_file, allow_pickle=True)

        z = grizli.utils.log_zgrid(zr=zr, dz=dz0)
        chi2 = z * 0.
        for i in range(len(z)):
            if eazyp:
                out = self.fit_combined_at_z(z=z[i],
                                             eazyp=eazyp,
                                             ix=ix,
                                             order=order,
                                             scale_fit=scale_fit)
                chi2[i], bg, full, coeffs, err, scale_fit = out
            else:
                out = self.fit_at_z(z=z[i], templates=t_complex)
                chi2[i], bg, full, coeffs, err = out

            if verbose:
                print('{0:.4f} - {1:10.1f}'.format(z[i], chi2[i]))

        # Zoom in on the chi-sq minimum.
        ci = chi2
        zi = z
        for iter in range(1, 7):
            if prior is not None:
                pz = np.interp(zi, prior[0], prior[1])
                cp = ci + pz
            else:
                cp = ci

            iz = np.argmin(cp)
            z0 = zi[iz]
            dz = dz0 / 2.02**iter
            zi = grizli.utils.log_zgrid(zr=[z0 - dz * 4, z0 + dz * 4], dz=dz)
            ci = zi * 0.
            for i in range(len(zi)):

                if eazyp:
                    out = self.fit_combined_at_z(z=zi[i],
                                                 eazyp=eazyp,
                                                 ix=ix,
                                                 order=order,
                                                 scale_fit=scale_fit)
                    ci[i], bg, full, coeffs, err, scale_fit = out
                else:
                    out = self.fit_at_z(z=zi[i],
                                        templates=t_complex,
                                        fitter=fitter)
                    ci[i], bg, full, coeffs, err = out

                # out = self.fit_at_z(z=zi[i], templates=t_complex,
                #                     fitter=fitter)
                #
                # ci[i], bg, full, coeffs, err = out

                if verbose:
                    print('{0:.4f} - {1:10.1f}'.format(zi[i], ci[i]))

            z = np.append(z, zi)
            chi2 = np.append(chi2, ci)

        so = np.argsort(z)
        z = z[so]
        chi2 = chi2[so]

        # Apply the prior
        if prior is not None:
            pz = np.interp(z, prior[0], prior[1])
            chi2 += pz

        # Get the fit with the individual line templates at the best redshift
        chi2x, bgz, fullz, coeffs, err = self.fit_at_z(z=z[np.argmin(chi2)],
                                                       templates=t_i,
                                                       fitter=fitter,
                                                       get_uncertainties=True)

        # Mask outliers
        if outlier_threshold > 0:
            resid = self.scif - fullz - bgz
            outlier_mask = (resid * self.sivarf < outlier_threshold)
            #outlier_mask &= self.fit_mask
            #self.sivarf[outlier_mask] = 1/resid[outlier_mask]
            #print('Mask {0} pixels with resid > {1} sigma'.format((outlier_mask).sum(), outlier_threshold))

            print('Mask {0} pixels with resid > {1} sigma'.format(
                (~outlier_mask & self.fit_mask).sum(), outlier_threshold))
            self.fit_mask &= outlier_mask
            #self.DoF = self.fit_mask.sum() #(self.ivar > 0).sum()
            self.DoF = int((self.fit_mask * self.weightf).sum())

        # Table with z, chi-squared
        t = grizli.utils.GTable()
        t['z'] = z
        t['chi2'] = chi2

        if prior is not None:
            t['prior'] = pz

        # "area" parameter for redshift quality.
        num = np.trapz(np.clip(chi2 - chi2.min(), 0, 25), z)
        denom = np.trapz(z * 0 + 25, z)
        area25 = 1 - num / denom

        # "best" redshift
        zbest = z[np.argmin(chi2)]

        # Metadata will be stored as header keywords in the FITS table
        t.meta = OrderedDict()
        t.meta['ID'] = (self.h0['ID'], 'Object ID')
        t.meta['RA'] = (self.h0['RA'], 'Right Ascension')
        t.meta['DEC'] = (self.h0['DEC'], 'Declination')
        t.meta['Z'] = (zbest, 'Best-fit redshift')
        t.meta['CHIMIN'] = (chi2.min(), 'Min Chi2')
        t.meta['CHIMAX'] = (chi2.max(), 'Max Chi2')
        t.meta['DOF'] = (self.DoF, 'Degrees of freedom')
        t.meta['AREA25'] = (area25, 'Area under CHIMIN+25')
        t.meta['FITTER'] = (fitter, 'Minimization algorithm')
        t.meta['HASPRIOR'] = (prior is not None, 'Was prior specified?')

        # Best-fit templates
        tc, tl = self.generate_1D_templates(coeffs,
                                            templates_file=templates_file)
        # for i, te in enumerate(t_i):
        #     if i == 0:
        #         tc = t_i[te].zscale(0, scalar=coeffs[i])
        #         tl = t_i[te].zscale(0, scalar=coeffs[i])
        #     else:
        #         if te.startswith('line'):
        #             tc += t_i[te].zscale(0, scalar=0.)
        #         else:
        #             tc += t_i[te].zscale(0, scalar=coeffs[i])
        #
        #         tl += t_i[te].zscale(0, scalar=coeffs[i])

        # Get line fluxes, uncertainties and EWs
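        # Rest-frame equivalent width: integrate (line+continuum)/continuum - 1
        # over the wavelength span of the line template.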
        il = 0
        for i, te in enumerate(t_i):
            if te.startswith('line'):
                il += 1

                if coeffs[i] == 0:
                    EW = 0.
                else:
                    tn = (t_i[te].zscale(0, scalar=coeffs[i]) +
                          tc.zscale(0, scalar=1))

                    td = (t_i[te].zscale(0, scalar=0) + tc.zscale(0, scalar=1))

                    clip = (td.wave <= t_i[te].wave.max())
                    clip &= (td.wave >= t_i[te].wave.min())

                    EW = np.trapz((tn.flux / td.flux)[clip] - 1, td.wave[clip])
                    if not np.isfinite(EW):
                        EW = -1000.

                t.meta['LINE{0:03d}F'.format(il)] = (coeffs[i],
                                                     '{0} line flux'.format(
                                                         te[5:]))

                t.meta['LINE{0:03d}E'.format(il)] = (
                    err[i], '{0} line flux uncertainty'.format(te[5:]))

                #print('xxx EW', EW)
                t.meta['LINE{0:03d}W'.format(il)] = (
                    EW, '{0} line rest EQW'.format(te[5:]))

        tfile = self.file.replace('.fits', '.zfit.fits')
        if os.path.exists(tfile):
            os.remove(tfile)

        t.write(tfile)

        ### Add image HDU and templates
        hdu = pyfits.open(tfile, mode='update')
        hdu[1].header['EXTNAME'] = 'ZFIT'

        # oned_templates = np.array([tc.wave*(1+zbest), tc.flux/(1+zbest),
        #                            tl.flux/(1+zbest)])
        header = pyfits.Header()
        header['TEMPFILE'] = (templates_file, 'File with stored templates')
        hdu.append(pyfits.ImageHDU(data=coeffs, name='COEFFS'))

        for i in range(self.Next):
            E = self.E[i]
            model_i = fullz[self.slices[i]].reshape(E.sh)
            bg_i = bgz[self.slices[i]].reshape(E.sh)

            model_i[~np.isfinite(model_i)] = 0
            bg_i[~np.isfinite(bg_i)] = 0

            hdu.append(
                pyfits.ImageHDU(data=model_i, header=E.header, name='MODEL'))

            hdu.append(
                pyfits.ImageHDU(data=bg_i, header=E.header, name='BACKGROUND'))

        hdu.flush()

        if make_plot:
            self.make_fit_plot(hdu)

        return hdu
Example #21
0
def gr2fits(filename, merge=True, verbose=False):

    try:
        handle = open(filename, 'rb')
    except IOError:
        raise IOError('File %s does not exist' % filename)

    nheaders = 0
    nread = 0
    baseband = []

    fitsout = fits.HDUList()

    while (True):
        """
        /opt/local/bin/gr_read_file_metadata
        note that there can be > 1 metadata blocks
        I think they can come every 1e6 items
        """
        # read out next header bytes
        hdr_start = handle.tell()
        header_str = handle.read(parse_file_metadata.HEADER_LENGTH)
        if (len(header_str) == 0):
            break

        # Convert from string to PMT (should be a dictionary)
        try:
            header = pmt.deserialize_str(header_str)
        except RuntimeError:
            raise IOError(
                "Could not deserialize header: invalid or corrupt data file.\n"
            )

        if verbose:
            print("HEADER {0}".format(nheaders))
        info = parse_file_metadata.parse_header(header, verbose)
        if (info["extra_len"] > 0):
            extra_str = handle.read(info["extra_len"])
            if (len(extra_str) == 0):
                break

            try:
                extra = pmt.deserialize_str(extra_str)
            except RuntimeError:
                sys.stderr.write(
                    "Could not deserialize extras: invalid or corrupt data file.\n"
                )
                break

            if verbose:
                print("\nExtra Header:")
            extra_info = parse_file_metadata.parse_extra_dict(
                extra, info, verbose)

        nheaders += 1
        nread += parse_file_metadata.HEADER_LENGTH + info["extra_len"]
        handle.seek(nread, 0)
        h = extra_info
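        # 8 bytes per complex item corresponds to float32 pairs (complex64).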
        if h['size'] == 8 and h['cplx']:
            dtype = np.complex64

        d = np.fromfile(handle, dtype=dtype, count=h['nitems'])
        t0 = np.arange(2 * len(d)) / h['rx_rate'] / 2
        t = np.arange(len(d)) / h['rx_rate']

        nread += info['nbytes']
        handle.seek(nread, 0)

        fitsout.append(fits.ImageHDU(data=np.c_[d.real, d.imag]))
        fitsout[-1].header['NITEMS'] = (h['nitems'],
                                        'Number of complex samples')
        fitsout[-1].header['RATE'] = (h['rx_rate'], '[Hz] sample rate')
        fitsout[-1].header['RX_FREQ'] = (pmt.to_float(h['rx_freq']) / 1e6,
                                         '[MHz] Radio frequency')
        fitsout[-1].header['RX_TIME'] = (h['rx_time'],
                                         '[s] Time of start of block')

    if merge:
        totallen = 0
        for i in range(0, len(fitsout)):
            totallen += fitsout[i].header['NAXIS2']
        d = np.zeros((totallen, 2), dtype=fitsout[1].data.dtype)
        nmax = 0
        for i in range(0, len(fitsout)):
            d[nmax:nmax + fitsout[i].header['NAXIS2']] = fitsout[i].data
            nmax += fitsout[i].header['NAXIS2']
        newfitsout = fits.HDUList()
        newfitsout.append(fits.PrimaryHDU(data=d))
        newfitsout[0].header = fitsout[1].header
        newfitsout[0].header['NITEMS'] = totallen
        newfitsout[0].header['EXPTIME'] = (d.shape[0] /
                                           newfitsout[0].header['RATE'],
                                           '[s] Duration of file')
        fitsout = newfitsout

    fitsout.verify('silentfix')
    if os.path.exists(filename + '.fits'):
        os.remove(filename + '.fits')
    fitsout.writeto(filename + '.fits')
    print('Wrote %s.fits' % filename)
    return fitsout
Example #22
0
"""
At the time this file was generated, the 'CIE' extension name referred to the 
sky west camera, the 'CIW' extension name referred to the sky east camera, the
'CIN' extension name referred to the central camera, and the 'CIC' extension 
name referred to the sky north camera.
"""
fname_orig = '/project/projectdirs/desi/users/ameisner/CI/post_install_calibs/CI_master_bias-20190330.fits'

# key = old, value = new
mapping = {'CIE' : 'CIW', 
           'CIN' : 'CIC', 
           'CIC' : 'CIN',
           'CIS' : 'CIS', 
           'CIW' : 'CIE'}

hdulist = []
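# The first HDU of a FITS file must be a PrimaryHDU; subsequent extensions
# are ImageHDUs.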
for k,v in mapping.items():
    im, h = fits.getdata(fname_orig, header=True, extname=k)

    h['EXTNAME'] = v

    if len(hdulist) == 0:
        hdu = fits.PrimaryHDU(im, header=h)
    else:
        hdu = fits.ImageHDU(im, header=h)
    hdulist.append(hdu)

outname = '/project/projectdirs/desi/users/ameisner/CI/post_install_calibs/CI_master_bias.fits'
assert(not os.path.exists(outname))
hdulist = fits.HDUList(hdulist)
hdulist.writeto(outname)
Example #23
0
def unpackFITS(h5IN, h5archive, overwrite=True):
    # ~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~
    """ Package contents of an h5 block to multi-extension FITS files.

    MAJOR BUG: This does not like the ExtendLinked HDF5 files one bit... Only
    real blocks. No idea why.

    (1) Read h5 baby block of symbolic links.
    (2) Count number of target blocks.
    (3) Begin loop over packaging function.
      -- Write a single row of the SAMI Master as a primary HDU.
      -- Write each dataset as a FITS extension with corresponding header.
    (4) Inform user of FITS files created, exit successfully.
    """

    # Open h5 file.
    hdf = h5.File(h5IN, 'r')

    # Count number of target blocks.
    version = list(hdf['/SAMI'].keys())[0]  # = getVersion(h5IN, hdf, '')
    # *** Assuming only one version of data available.
    g_version = hdf['/SAMI/' + version]

    nTarget = 0
    nCalibrator = 0

    if 'Target' in g_version.keys():
        nTarget = len(g_version['Target'].keys())
        gTarget = g_version['Target']
        thereAreTargets = True
    if 'Calibrator' in g_version.keys():
        nCalibrator = len(g_version['Calibrator'].keys())
        gCalibrator = g_version['Calibrator']
        thereAreCalibrators = True

    nGroups = nTarget + nCalibrator

    def plural(nGroups):
        plural = ''
        if nGroups > 1:
            plural = 's'
        return plural

    print("Identified "+str(nGroups)+" Target Block"+plural(nGroups)+\
          " in '"+h5IN+"'.")

    def stripTable(name, version, h5archive):
        #master = hdf['/SAMI/'+version+'/Table/SAMI_MASTER']
        h5archive = h5.File(h5archive, 'r')
        master = h5archive['/SAMI/' + version + '/Table/SAMI_MASTER']
        tabline = master[master["CATID"] == int(name)][0]
        # For now excluding all strings to make FITS-compatible
        # *** BUT HEADER will not know that.
        hdu = [v for v in tabline if not isinstance(v, str)]
        hdr = makeHead(master)
        h5archive.close()
        return (hdu, hdr)

    # Begin loop over all SAMI targets requested.
    # *** CURRENTLY ONLY Targets, not Calibrators. Combine groups in a list?
    for thisG in range(nTarget):

        # What is the SAMI name of this target?
        name = list(gTarget.keys())[thisG]

        # Search for 'Cube' and 'RSS' among Dsets to define output filename
        areThereCubes = ['Cube' in s for s in gTarget[name].keys()]
        areThereRSS = ['RSS' in s for s in gTarget[name].keys()]
        sContents = []
        if sum(areThereCubes) > 0: sContents.append('cubes')
        if sum(areThereRSS) > 0: sContents.append('RSS')
        if len(sContents) > 1: sContents = '_'.join(sContents)
        else: sContents = sContents[0]

        # Define output filename
        fname = 'SAMI_' + name + '_' + sContents + '.fits'

        # Primary HDU is a single row of the Master table.
        hdu0, hdr0 = stripTable(name, version, h5archive)
        hdulist = pf.HDUList([pf.PrimaryHDU(hdu0, header=hdr0)])

        # Cycle through all dsets, make HDUs and headers with native names.

        # Get number of datasets.
        thisTarget = gTarget[name]
        nDsets = len(thisTarget.keys())

        # Begin loop through all datasets.
        for thisDset in range(nDsets):
            #for thisDset in range(5):

            # Determine dataset.
            dsetName = list(thisTarget.keys())[thisDset]
            print("Processing dataset '" + dsetName + "'...")

            # Create dataset and populate header.
            data = thisTarget[dsetName]
            hdr = makeHead(data)

            # Add all this to an HDU.
            hdulist.append(
                pf.ImageHDU(np.array(thisTarget[dsetName]),
                            name=dsetName,
                            header=makeHead(data)))

        # Write to a new FITS file.
        hdulist.writeto(fname, clobber=overwrite)

    hdf.close()
Example #24
0
    def to_hdu(self,
               hdu_mask='MASK',
               hdu_uncertainty='UNCERT',
               hdu_flags=None,
               wcs_relax=True):
        """Creates an HDUList object from a CCDData object.

        Parameters
        ----------
        hdu_mask, hdu_uncertainty, hdu_flags : str or None, optional
            If it is a string append this attribute to the HDUList as
            `~astropy.io.fits.ImageHDU` with the string as extension name.
            Flags are not supported at this time. If ``None`` this attribute
            is not appended.
            Default is ``'MASK'`` for mask, ``'UNCERT'`` for uncertainty and
            ``None`` for flags.

        wcs_relax : bool
            Value of the ``relax`` parameter to use in converting the WCS to a
            FITS header using `~astropy.wcs.WCS.to_header`. The common
            ``CTYPE`` ``RA---TAN-SIP`` and ``DEC--TAN-SIP`` requires
            ``relax=True`` for the ``-SIP`` part of the ``CTYPE`` to be
            preserved.

        Raises
        ------
        ValueError
            - If ``self.mask`` is set but not a `numpy.ndarray`.
            - If ``self.uncertainty`` is set but not a
              `~astropy.nddata.StdDevUncertainty`.
            - If ``self.uncertainty`` is set but has another unit then
              ``self.data``.

        NotImplementedError
            Saving flags is not supported.

        Returns
        -------
        hdulist : `~astropy.io.fits.HDUList`
        """
        if isinstance(self.header, fits.Header):
            # Copy here so that we can modify the HDU header by adding WCS
            # information without changing the header of the CCDData object.
            header = self.header.copy()
        else:
            # Because _insert_in_metadata_fits_safe is written as a method
            # we need to create a dummy CCDData instance to hold the FITS
            # header we are constructing. This probably indicates that
            # _insert_in_metadata_fits_safe should be rewritten in a more
            # sensible way...
            dummy_ccd = CCDData([1], meta=fits.Header(), unit="adu")
            for k, v in self.header.items():
                dummy_ccd._insert_in_metadata_fits_safe(k, v)
            header = dummy_ccd.header
        if self.unit is not u.dimensionless_unscaled:
            header['bunit'] = self.unit.to_string()
        if self.wcs:
            # Simply extending the FITS header with the WCS can lead to
            # duplicates of the WCS keywords; iterating over the WCS
            # header should be safer.
            #
            # Turns out if I had read the io.fits.Header.extend docs more
            # carefully, I would have realized that the keywords exist to
            # avoid duplicates and preserve, as much as possible, the
            # structure of the commentary cards.
            #
            # Note that until astropy/astropy#3967 is closed, the extend
            # will fail if there are comment cards in the WCS header but
            # not header.
            wcs_header = self.wcs.to_header(relax=wcs_relax)
            header.extend(wcs_header, useblanks=False, update=True)
        hdus = [fits.PrimaryHDU(self.data, header)]

        if hdu_mask and self.mask is not None:
            # Always assuming that the mask is a np.ndarray (check that it has
            # a 'shape').
            if not hasattr(self.mask, 'shape'):
                raise ValueError('only a numpy.ndarray mask can be saved.')

            # Convert boolean mask to uint since io.fits cannot handle bool.
            hduMask = fits.ImageHDU(self.mask.astype(np.uint8), name=hdu_mask)
            hdus.append(hduMask)

        if hdu_uncertainty and self.uncertainty is not None:
            # We need to save some kind of information which uncertainty was
            # used so that loading the HDUList can infer the uncertainty type.
            # No idea how this can be done so only allow StdDevUncertainty.
            if self.uncertainty.__class__.__name__ != 'StdDevUncertainty':
                raise ValueError('only StdDevUncertainty can be saved.')

            # Assuming uncertainty is an StdDevUncertainty save just the array
            # this might be problematic if the Uncertainty has a unit differing
            # from the data so abort for different units. This is important for
            # astropy > 1.2
            if (hasattr(self.uncertainty, 'unit')
                    and self.uncertainty.unit is not None
                    and self.uncertainty.unit != self.unit):
                raise ValueError('saving uncertainties with a unit differing '
                                 'from the data unit is not supported.')

            hduUncert = fits.ImageHDU(self.uncertainty.array,
                                      name=hdu_uncertainty)
            hdus.append(hduUncert)

        if hdu_flags and self.flags:
            raise NotImplementedError('adding the flags to a HDU is not '
                                      'supported at this time.')

        hdulist = fits.HDUList(hdus)

        return hdulist
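A usage sketch for ``to_hdu`` (assumes a small CCDData with a mask and a standard-deviation uncertainty):

import numpy as np
from astropy.nddata import CCDData, StdDevUncertainty

ccd = CCDData(np.ones((4, 4)), unit='adu',
              mask=np.zeros((4, 4), dtype=bool),
              uncertainty=StdDevUncertainty(np.full((4, 4), 0.1)))
hdulist = ccd.to_hdu()   # [PrimaryHDU, 'MASK' ImageHDU, 'UNCERT' ImageHDU]
hdulist.writeto('ccd.fits', overwrite=True)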
Example #25
0
def export_OLD(name,
               h5file,
               get_cube=False,
               get_rss=False,
               colour='',
               version='',
               all_versions=False):
    # ~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~
    """ The main SAMI_DB .fits export function 
 
    name       [str]  The name(s) of the SAMI target(s) required. 
    h5file     [str]  The SAMI archive file from which to export. 
    get_cube   [boo]  Export data-cube(s). 
    get_rss    [boo]  Export RSS file(s). 
    rss_centre [boo]  Export only central RSS file. *UNDER CONSTRUCTION*
    get_sdss   [boo]  Fetch an SDSS g' cutout. *UNDER CONSTRUCTION*
    get_meta   [boo]  Fetch emission line maps. *UNDER CONSTRUCTION*
    colour     [str]  Colour-specific export. Set to 'blue' or 'red'.
    version    [str]  Data version to export; looked up in the archive if empty.
    outfile    [str]  The name of the file to be output. Default: "col_name".
    """

    # Check that some data have been requested:
    if (not get_cube) and (not get_rss):
        raise SystemExit("Please raise at least one of the 'get???' flags.")

    # Open HDF5 file and run a series of tests and diagnostics.
    hdf = h5.File(h5file, 'r')

    SAMIformatted = checkSAMIformat(hdf)  # Check for SAMI formatting
    if version == '':  # Get the data version
        version = getVersion(h5file, hdf, version)
    obstype = getObstype(hdf, name, version)  # Get observation type
    targ_group = getTargetGroup(
        hdf,
        name,  # ID target group
        version,
        obstype)
    if colour == '':  # Check for monochrome output
        colour = ['B', 'R']

    # Look for cubes:
    if ('Blue_cube_data' not in targ_group.keys()) or \
       ('Blue_cube_variance' not in targ_group.keys()) or \
       ('Blue_cube_weight' not in targ_group.keys()) or \
       ('Red_cube_data' not in targ_group.keys()) or \
       ('Red_cube_variance' not in targ_group.keys()) or \
       ('Red_cube_weight' not in targ_group.keys()):

        raise SystemExit(
            'The target block is incomplete, please check archive.')

    # Start the HDU and extension lists
    all_hdu = []
    all_ext = []

    # For now add a dummy Primary HDU
    phdu = pf.PrimaryHDU(np.arange(10))
    all_hdu.append(phdu)

    for band in colour:

        # Figure out what will go in this multi-extension fits file.
        # ***(Should also think about simple .h5 outputs)***

        if get_cube:

            data = targ_group[band + '_Cube_Data']
            var = targ_group[band + '_Cube_Variance']
            wht = targ_group[band + '_Cube_Weight']

            all_ext.append(band + ' Cube Data')
            all_hdu.append(pf.ImageHDU(np.array(data), name=band + ' DAT'))

            all_ext.append(band + ' Cube Variance')
            all_hdu.append(pf.ImageHDU(np.array(var), name=band + ' VAR'))

            all_ext.append(band + ' Cube Weight')
            all_hdu.append(pf.ImageHDU(np.array(wht), name=band + ' WHT'))

        if get_rss:
            for irss in range(1, 8):

                rss_data = targ_group[band + '_RSS_data_' + str(irss)]
                rss_var = targ_group[band + '_RSS_variance_' + str(irss)]

                all_ext.append(band + ' RSS Data ' + str(irss))
                all_hdu.append(pf.ImageHDU(np.array(rss_data),
                                           name=band + ' RSS DAT ' + str(irss)))

                all_ext.append(band + ' RSS Variance ' + str(irss))
                all_hdu.append(pf.ImageHDU(np.array(rss_var),
                                           name=band + ' RSS VAR ' + str(irss)))
        """

        # Build Primary Header Unit as a table listing all extensions
        all_ext = []
        if get_cube: 
            n_ext_cube = len(colour)*3
            all_ext.append('Extensions 1-'+str(n_ext_cube)+' = Datacube')
            
        if get_rss:
            n_ext_rss = len(colour)*7
            all_ext.append('Extensions '+str(n_ext_cube+1)+'-'+
                           n_ext_cube+1+n_ext_rss+' = RSS strips')

        # Make primary unit, just that list of strings. 
        phu = pf.PrimaryHDU(all_ext)
        
        hdu_c1 = pf.ImageHDU(np.array(data), hdr1, name=colour[col]+' DATA')
        """

    # Write the HDU list to a file
    hdulist = pf.HDUList(all_hdu)
    hdulist.writeto('dummy_export.fits')

    # Close HDU, h5, and wrap it up.
    hdulist.close()
    hdf.close()
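For reference, a hedged, self-contained sketch of the HDF5-to-FITS export pattern used above (the file, group, and dataset names are hypothetical; assumes h5py, numpy, and astropy):

import h5py
import numpy as np
from astropy.io import fits

hdus = [fits.PrimaryHDU()]
with h5py.File('archive.h5', 'r') as hdf:            # hypothetical archive
    group = hdf['some_target']                       # hypothetical target group
    for key in ('B_Cube_Data', 'B_Cube_Variance'):   # hypothetical datasets
        # copy each HDF5 dataset into its own FITS image extension
        hdus.append(fits.ImageHDU(np.array(group[key]), name=key))
fits.HDUList(hdus).writeto('export.fits', overwrite=True)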
Example #26
0
    def make_measurement(datafile,
                         error,
                         outfile,
                         rms=None,
                         masknan=True,
                         overwrite=False,
                         unit="adu"):
        """Create a FITS files with 2 HDUS, the first being the datavalue and the 2nd being 
        the data uncertainty. This format makes allows the resulting file to be read into the underlying :class:'~astropy.nddata.CCDData` class.

        :param datafile: The FITS file containing the data as a function of spatial coordinates
        :type datafile: str
        :param error: The errors on the data. Possible values for error are:

             - a filename of a map with the same shape as datafile containing the error values per pixel
             - a percentage value 'XX%' (the string must include the '%' symbol)
             - 'rms', meaning use the rms parameter if given, otherwise look for the RMS keyword in the FITS header of the datafile

        :type error: str
        :param outfile: The output file to write the result in (FITS format)
        :type outfile: str
        :param rms:  If error == 'rms', this value may give the rms in the same units as the data (e.g. 'erg s-1 cm-2 sr-1').
        :type rms: float or :class:`astropy.units.Unit`
        :param masknan: Whether to mask any pixel where the data or the error is NaN. Default: True
        :type masknan: bool
        :param overwrite: If `True`, overwrite the output file if it exists. Default: `False`.
        :type overwrite: bool
        :param unit: Intensity unit to use for the data, this will override BUNIT in header if present.
        :type unit: :class:`astropy.units.Unit` or str

        :raises Exception: on various FITS header issues
        :raises OSError: if `overwrite` is `False` and the output file exists.

        Example usage:
        
        .. code-block:: python

            # example with percentage error
            Measurement.make_measurement("my_infile.fits",error='10%',outfile="my_outfile.fits")

            # example with measurement in units of K km/s and error 
            # indicated by RMS keyword in input file.
            Measurement.make_measurement("my_infile.fits",error='rms',outfile="my_outfile.fits",unit="K km/s",overwrite=True)
        """
        _data = fits.open(datafile)
        needsclose = False
        if error == 'rms':
            _error = deepcopy(_data)
            if rms is None:
                rms = _data[0].header.get("RMS", None)
                if rms is None:
                    raise Exception(
                        "rms not given as parameter and RMS keyword not present in data header"
                    )
                else:
                    print("Found RMS in header: %.2E %s" %
                          (rms, _error[0].data.shape))
            tmp = np.full(_error[0].data.shape, rms)
            _error[0].data[:] = rms
        elif "%" in error:
            percent = float(error.strip('%')) / 100.0
            _error = deepcopy(_data)
            _error[0].data = _data[0].data * percent
        else:
            _error = fits.open(error)
            needsclose = True

        fb = _data[0].header.get('bunit',
                                 str(unit))  #use str in case Unit was given
        eb = _error[0].header.get('bunit', str(unit))
        if fb != eb:
            raise Exception(
                "BUNIT must be the same in both data (%s) and error (%s) maps"
                % (fb, eb))
        # Sigh, this is necessary since there is no mode available in
        # fits.open that will truncate an existing file for writing
        if overwrite and exists(outfile):
            remove(outfile)
        _out = fits.open(name=outfile, mode="ostream")
        _out.append(_data[0])
        _out[0].header['bunit'] = fb
        _out.append(_error[0])
        _out[1].header['extname'] = 'UNCERT'
        _out[1].header['bunit'] = eb
        _out[1].header['utype'] = 'StdDevUncertainty'
        if masknan:
            fmasked = ma.masked_invalid(_data[0].data)
            emasked = ma.masked_invalid(_error[0].data)
            final_mask = utils.mask_union([fmasked, emasked])
            # Convert boolean mask to uint since io.fits cannot handle bool.
            hduMask = fits.ImageHDU(final_mask.astype(np.uint8), name='MASK')
            _out.append(hduMask)
        _out.writeto(outfile, overwrite=overwrite)
        _data.close()
        _out.close()
        if needsclose:
            _error.close()
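A hedged usage sketch: because make_measurement writes BUNIT plus an extension flagged as a StdDevUncertainty, the output should load directly into CCDData (file name taken from the docstring example above):

from astropy.nddata import CCDData

ccd = CCDData.read("my_outfile.fits")      # unit picked up from BUNIT
print(ccd.unit, type(ccd.uncertainty).__name__)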
Example #27
0
def CDF_contour(linename,
                objname,
                fitting_table,
                plot=True,
                cont=False,
                pix_cen=None,
                output=None,
                print_obj=False,
                sigma_floor=0,
                nofits=False,
                ggd37=False):

    from scipy.interpolate import griddata
    from mpl_toolkits.axes_grid1 import make_axes_locatable
    from astropy.io import ascii, fits
    import numpy as np
    import matplotlib.pyplot as plt

    # get the line data
    fitting = ascii.read(fitting_table)
    data = fitting[(fitting['Object'] == objname)
                   & (fitting['Line'] == linename) &
                   (fitting['Pixel_No.'] != 'c')]

    # determine the name of the central spaxel
    if pix_cen is None:
        pix_cen_list = ['SLWC3', 'SSWD4', '13']
        pix_cen = data['Pixel_No.'][(data['Pixel_No.'] == pix_cen_list[0])+\
                                    (data['Pixel_No.'] == pix_cen_list[1])+\
                                    (data['Pixel_No.'] == pix_cen_list[2])]

    # get the RA & Dec of the central spaxel
    ra_cen = data['RA(deg)'][data['Pixel_No.'] == pix_cen].data
    dec_cen = data['Dec(deg)'][data['Pixel_No.'] == pix_cen].data

    # calculate the RA and Dec separations
    plot_ra = (data['RA(deg)'].data - ra_cen) * np.cos(
        np.radians(dec_cen)) * 3600
    plot_dec = (data['Dec(deg)'].data - dec_cen) * 3600

    # determine the size of the contour plot
    size = np.ceil(max(abs(plot_ra).max(), abs(plot_dec).max()) / 10) * 10

    # round up the ranges of RA and Dec separations, and calculate the number of points in between
    ra_range = [
        np.ceil(plot_ra.max() / 10) * 10,
        np.ceil(plot_ra.min() / 10) * 10,
        np.ceil((plot_ra.max() - plot_ra.min()) / 10) * 10
    ]
    dec_range = [
        np.ceil(plot_dec.max() / 10) * 10,
        np.ceil(plot_dec.min() / 10) * 10,
        np.ceil((plot_dec.max() - plot_dec.min()) / 10) * 10
    ]

    # create the rebinned grid for RA and Dec.  Use oversample of 4.
    ra_rebin = np.linspace(ra_range[0], ra_range[1], int(ra_range[2] * 4))
    dec_rebin = np.linspace(dec_range[1], dec_range[0], int(dec_range[2] * 4))

    # use the rebinned RA and Dec to re-grid the contours, both line and continuum
    if ggd37:
        selector = (plot_dec <= 0.)
        plot_ra = plot_ra[selector]
        plot_dec = plot_dec[selector]
        data = data[selector]

    z = griddata((plot_ra, plot_dec),
                 data['Str(W/cm2)'].data,
                 (ra_rebin[None, :], dec_rebin[:, None]),
                 method='cubic')
    z_cont = griddata(
        (plot_ra, plot_dec),
        data['Base(W/cm2/um)'].data * data['FWHM(um)'].data * 1.086,
        (ra_rebin[None, :], dec_rebin[:, None]),
        method='cubic')

    # calculate the noise floor for the line emission
    sigma = np.nanmin(data['Str(W/cm2)'] / data['SNR'].data)
    if sigma_floor != 0:
        z_floor = sigma * sigma_floor
    else:
        z_floor = np.nanmin(z)

    # create the figure and axis objects
    fig = plt.figure()
    ax = fig.add_subplot(111)

    # plot the contour with color and lines
    levels = np.linspace(z_floor, np.nanmax(z), 10)[1:]
    ax.contour(ra_rebin, dec_rebin, z, levels, linewidths=1.5, cmap='Reds')

    # whether show the continuum as image or line emission as image
    if cont:
        im = ax.imshow(
            z_cont,
            cmap='Blues',
            origin='lower',
            extent=[ra_range[0], ra_range[1], dec_range[1], dec_range[0]])
        im_label = 'F_{cont.}'
    else:
        im = ax.imshow(
            z,
            cmap='Blues',
            origin='lower',
            extent=[ra_range[0], ra_range[1], dec_range[1], dec_range[0]])
        im_label = 'F_{line}'

    # set the bad pixel to white
    im.cmap.set_bad('w', 1.)
    # setup ticks and tick labels
    ax.set_xticks(
        np.linspace(-np.ceil(-ra_range[1] / 10) * 10,
                    np.ceil(ra_range[0] / 10) * 10,
                    5,
                    dtype='int'))
    ax.set_xticklabels(
        np.linspace(-np.ceil(-ra_range[1] / 10) * 10,
                    np.ceil(ra_range[0] / 10) * 10,
                    5,
                    dtype='int'))
    ax.set_yticks(
        np.linspace(-np.ceil(-dec_range[1] / 10) * 10,
                    np.ceil(dec_range[0] / 10) * 10,
                    5,
                    dtype='int'))
    ax.set_yticklabels(
        np.linspace(-np.ceil(-dec_range[1] / 10) * 10,
                    np.ceil(dec_range[0] / 10) * 10,
                    5,
                    dtype='int'))

    # create the colorbar
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad=0.05)
    cb = fig.colorbar(im, cax=cax)
    cb.solids.set_edgecolor("face")
    cb.ax.minorticks_on()
    cb.ax.set_ylabel(r'$\rm' + im_label + r'\,[W\,cm^{-2}]$', fontsize=16)
    cb_obj = plt.getp(cb.ax.axes, 'yticklabels')
    plt.setp(cb_obj, fontsize=12)

    # set the x, y labels
    ax.set_xlabel(r'$\rm{RA\,offset\,[arcsec]}$', fontsize=18)
    ax.set_ylabel(r'$\rm{Dec\,offset\,[arcsec]}$', fontsize=18)
    for axis in ['top', 'bottom', 'left', 'right']:
        ax.spines[axis].set_linewidth(1.5)
    ax.minorticks_on()
    ax.tick_params('both',
                   labelsize=14,
                   width=1.5,
                   which='major',
                   pad=5,
                   length=5)
    ax.tick_params('both',
                   labelsize=14,
                   width=1.5,
                   which='minor',
                   pad=5,
                   length=2.5)
    ax.set_aspect('equal', 'datalim')

    # print the object name
    if print_obj:
        ax.text(0.9,
                0.9,
                objname,
                transform=ax.transAxes,
                fontsize=18,
                ha='right')

    # output the interpolated 2D array and the RA/Dec arrays into FITS and ASCII files
    if output is not None:
        if not nofits:
            hdulist = fits.HDUList([fits.PrimaryHDU(z), fits.ImageHDU(z_cont)])
            hdulist.writeto(output, overwrite=True)

        # write out the RA/Dec arrays
        foo = open(output.split('.')[0] + '_interpolated_RA.txt', 'w')
        # the coordinates of the reference pixel
        foo.write('# Pixel 1: {:<12.8f} / {:<12.8f}\n'.format(
            ra_cen[0], dec_cen[0]))
        foo.write('{:<12s}\n'.format('RA_offset'))
        for ra_dum in ra_rebin:
            foo.write('{:<12.8f}\n'.format(ra_dum))
        foo.close()

        foo = open(output.split('.')[0] + '_interpolated_Dec.txt', 'w')
        # the coordinates of the reference pixel
        foo.write('# Pixel 1: {:<12.8f} / {:<12.8f}\n'.format(
            ra_cen[0], dec_cen[0]))
        foo.write('{:<12s}\n'.format('Dec_offset'))
        for dec_dum in dec_rebin:
            foo.write('{:<12.8f}\n'.format(dec_dum))
        foo.close()

        fig.savefig(output.split('.')[0] + '_contour.pdf',
                    format='pdf',
                    dpi=300,
                    bbox_inches='tight')
        print('Figure saved at ', output.split('.')[0] + '_contour.pdf')

    return (ra_rebin, dec_rebin), (z, z_cont)
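A self-contained sketch of the cubic re-gridding step at the heart of this routine, with synthetic scattered points standing in for the spaxel positions:

import numpy as np
from scipy.interpolate import griddata

rng = np.random.default_rng(0)
x = rng.uniform(-20, 20, 50)            # RA offsets [arcsec]
y = rng.uniform(-20, 20, 50)            # Dec offsets [arcsec]
v = np.exp(-(x**2 + y**2) / 100.0)      # synthetic line strengths

xg = np.linspace(-20, 20, 81)
yg = np.linspace(-20, 20, 81)
# broadcast the 1-D grids to 2-D sample points, as done above
vg = griddata((x, y), v, (xg[None, :], yg[:, None]), method='cubic')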
Example #28
0
def save_2d_images(sci_output,
                   raw_header,
                   spectrograph,
                   master_key_dict,
                   mfdir,
                   outfile,
                   clobber=True,
                   update_det=None):
    """ Write 2D images to the hard drive

    Args:
        sci_output (OrderedDict):
        raw_header (astropy.fits.Header or dict):
        master_key_dict (str):
        mfdir (str):
        outfile (str):
        clobber: bool, optional

    Returns:

    """
    if os.path.isfile(outfile) and update_det is not None:
        hdus, prihdu = io.init_hdus(update_det, outfile)
    else:
        # Primary HDU for output
        prihdu = fits.PrimaryHDU()
        # Update with original header, skipping a few keywords
        hdus = [prihdu]
        hdukeys = [
            'BUNIT', 'COMMENT', '', 'BITPIX', 'NAXIS', 'NAXIS1', 'NAXIS2',
            'HISTORY', 'EXTEND', 'DATASEC'
        ]
        for key in raw_header.keys():
            # Use new ones
            if key in hdukeys:
                continue
            # Update unused ones
            prihdu.header[key] = raw_header[key]
        # History
        if 'HISTORY' in raw_header.keys():
            # Strip \n
            tmp = str(raw_header['HISTORY']).replace('\n', ' ')
            prihdu.header.add_history(str(tmp))

        # PYPEIT
        # TODO Should the spectrograph be written to the header?
        prihdu.header['PIPELINE'] = str('PYPEIT')
        prihdu.header['PYPELINE'] = spectrograph.pypeline
        prihdu.header['SPECTROG'] = spectrograph.spectrograph
        prihdu.header['DATE-RDX'] = str(
            datetime.date.today().strftime('%Y-%b-%d'))
        prihdu.header['FRAMMKEY'] = master_key_dict['frame'][:-3]
        prihdu.header['BPMMKEY'] = master_key_dict['bpm'][:-3]
        prihdu.header['BIASMKEY'] = master_key_dict['bias'][:-3]
        prihdu.header['ARCMKEY'] = master_key_dict['arc'][:-3]
        prihdu.header['TRACMKEY'] = master_key_dict['trace'][:-3]
        prihdu.header['FLATMKEY'] = master_key_dict['flat'][:-3]
        prihdu.header['PYPMFDIR'] = str(mfdir)
        if sci_output['meta']['ir_redux']:
            prihdu.header['SKYSUB'] = 'DIFF'
        else:
            prihdu.header['SKYSUB'] = 'MODEL'

    # Fill in the images
    ext = len(hdus) - 1
    for key in sci_output.keys():
        if key in ['meta']:
            continue
        else:
            det = key
        sdet = parse.get_dnum(det, caps=True)  # e.g. DET02
        if 'sciimg' not in sci_output[det]:
            continue
        # Specified detector number?
        #if settings.argflag['reduce']['detnum'] is not None:
        #    if det not in map(int, settings.argflag['reduce']['detnum']):
        #        continue
        #    else:
        #        msgs.warn("Restricting the reduction to detector {:d}".format(det))

        # Write each processed product as its own extension, recording the
        # extension name in the primary header as EXTnnnn
        for label, dkey in [('Processed', 'sciimg'),
                            ('IVARRAW', 'sciivar'),
                            ('SKY', 'skymodel'),
                            ('OBJ', 'objmodel'),
                            ('IVARMODEL', 'ivarmodel'),
                            ('MASK', 'outmask')]:
            ext += 1
            keywd = 'EXT{:04d}'.format(ext)
            prihdu.header[keywd] = '{:s}-{:s}'.format(sdet, label)
            hdu = fits.ImageHDU(sci_output[det][dkey])
            hdu.name = prihdu.header[keywd]
            hdus.append(hdu)

    # Finish
    hdulist = fits.HDUList(hdus)
    hdulist.writeto(outfile, overwrite=clobber)
    msgs.info("Wrote: {:s}".format(outfile))
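A hedged read-back sketch for a file written by save_2d_images (the output name and detector string here are hypothetical; astropy's EXTNAME lookup is case-insensitive):

from astropy.io import fits

with fits.open('spec2d.fits') as hdul:
    sciimg = hdul['DET01-PROCESSED'].data   # extension named via the EXTnnnn card
    print(hdul[0].header['PYPELINE'], sciimg.shape)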
Example #29
0
def writeSingleFITS(data, wcs, output, template, clobber=True, verbose=True):
    """ Write out a simple FITS file given a numpy array and the name of another
    FITS file to use as a template for the output image header.
    """
    outname, outextn = fileutil.parseFilename(output)
    outextname, outextver = fileutil.parseExtn(outextn)

    if fileutil.findFile(outname):
        if clobber:
            log.info('Deleting previous output product: %s' % outname)
            fileutil.removeFile(outname)

        else:
            log.warning('Output file %s already exists and overwrite not '
                        'specified!' % outname)
            log.error('Quitting... Please remove before resuming operations.')
            raise IOError('Output file %s already exists.' % outname)

    # Now update WCS keywords with values from provided WCS
    if hasattr(wcs.sip, 'a_order'):
        siphdr = True
    else:
        siphdr = False
    wcshdr = wcs.wcs2header(sip2hdr=siphdr)

    if template is not None:
        # Get default headers from multi-extension FITS file
        # If input data is not in MEF FITS format, it will return 'None'
        # NOTE: These are HEADER objects, not HDUs
        (prihdr, scihdr, errhdr,
         dqhdr), newtab = getTemplates(template, EXTLIST)

        if scihdr is None:
            scihdr = fits.Header()
            indx = 0
            for c in prihdr.cards:
                if c.keyword not in ['INHERIT', 'EXPNAME']:
                    indx += 1
                else:
                    break
            for i in range(indx, len(prihdr)):
                scihdr.append(prihdr.cards[i])
            for i in range(indx, len(prihdr)):
                del prihdr[indx]
    else:
        scihdr = fits.Header()
        prihdr = fits.Header()
        # Start by updating PRIMARY header keywords...
        prihdr.set('EXTEND', value=True, after='NAXIS')
        prihdr['FILENAME'] = outname

    if outextname == '':
        outextname = 'sci'
    if outextver == 0:
        outextver = 1
    scihdr['EXTNAME'] = outextname.upper()
    scihdr['EXTVER'] = outextver

    for card in wcshdr.cards:
        scihdr[card.keyword] = (card.value, card.comment)

    # Create PyFITS HDUList for all extensions
    outhdu = fits.HDUList()
    # Setup primary header as an HDU ready for appending to output FITS file
    prihdu = fits.PrimaryHDU(header=prihdr)
    scihdu = fits.ImageHDU(header=scihdr, data=data)

    outhdu.append(prihdu)
    outhdu.append(scihdu)
    outhdu.writeto(outname)

    if verbose:
        print('Created output image: %s' % outname)
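A minimal sketch of the same idea without a template: build one SCI extension whose header carries the WCS keywords (synthetic WCS values; assumes numpy and astropy):

import numpy as np
from astropy.io import fits
from astropy.wcs import WCS

w = WCS(naxis=2)
w.wcs.crpix = [50.5, 50.5]
w.wcs.cdelt = [-2.8e-4, 2.8e-4]
w.wcs.crval = [150.0, 2.0]
w.wcs.ctype = ['RA---TAN', 'DEC--TAN']

scihdr = w.to_header()          # WCS keywords become header cards
scihdr['EXTNAME'] = 'SCI'
scihdr['EXTVER'] = 1

outhdu = fits.HDUList([fits.PrimaryHDU(),
                       fits.ImageHDU(data=np.zeros((100, 100)), header=scihdr)])
outhdu.writeto('single.fits', overwrite=True)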
Example #30
0
    sfile = f'{wdir}/corrected/{xmode}_stacked_CCD{j:02}_{rev0:04}_{rev1:04}_0056_corr.fits.gz'
    if not os.path.isfile(sfile):
        print(f'No stacked file found: {sfile}')
        raise FileNotFoundError(sfile)
    t = Table.read(sfile)
    print('Doing CCD:', j)
    ww = run_fit_for_mnka(t, use_column=args.use_column, verbose=False)
    output[j - 1, :, :] = ww[0]
    output_err[j - 1, :, :] = ww[1]
    output_redchi[j - 1, :, :] = ww[2]
    output_nevts[j - 1, :, :] = ww[3]
#
# save to a FITS image
#
hdu0 = fits.PrimaryHDU()
hdu1 = fits.ImageHDU(output, name='RESIDUALS')
hdu2 = fits.ImageHDU(output_err, name='ERRORS')
hdu3 = fits.ImageHDU(output_redchi, name='CHI2_R')
hdu4 = fits.ImageHDU(output_nevts, name='NEVENTS')

hdul = fits.HDUList([hdu0, hdu1, hdu2, hdu3, hdu4])
hdu0.header['REV0'] = rev0
hdu0.header['REV1'] = rev1
hdu0.header['MODE'] = xmode
hdu0.header['HISTORY'] = f'Created by Ivan V, using fit_mnka_picorr, {date.today()}'
hdu0.header['COMMENT'] = f'Using events with {args.use_column}'
hdul.writeto(savefile, overwrite=True)
#
print(f'Results for [{rev0},{rev1}] saved to {savefile}')
print("*** All done")