Ejemplo n.º 1
0
def applyNpolCorr(fname, unpolcorr):
    """
    Determines whether non-polynomial distortion lookup tables should be added
    as extensions to the science file based on the 'NPOLFILE' keyword in the
    primary header and NPOLEXT kw in the first extension.
    This is a default correction and will always run in the pipeline.
    The file used to generate the extensions is
    recorded in the NPOLEXT keyword in the first science extension.
    If 'NPOLFILE' in the primary header is different from 'NPOLEXT' in the
    extension header and the file exists on disk and is a 'new type' npolfile,
    then the lookup tables will be updated as 'WCSDVARR' extensions.

    Parameters
    ----------
    fname : `~astropy.io.fits.HDUList`
        Input FITS file object.
    unpolcorr : bool
        Whether the NPOL correction was requested at all; the final
        decision is AND-ed with this flag.

    Returns
    -------
    bool
        True if the NPOL correction should be applied.

    Raises
    ------
    IOError
        If 'NPOLFILE' names a file that does not exist on disk.
    """
    applyNPOLCorr = True
    try:
        # get NPOLFILE kw from primary header
        fnpol0 = fname[0].header['NPOLFILE']
        if fnpol0 == 'N/A':
            utils.remove_distortion(fname, "NPOLFILE")
            return False
        fnpol0 = fileutil.osfn(fnpol0)
        if not fileutil.findFile(fnpol0):
            # BUG FIX: the original code placed the two string halves in
            # separate statements, so the second (formatted) literal was a
            # discarded expression and the logged message was truncated and
            # never formatted.  Build one complete, formatted message.
            msg = ('"NPOLFILE" exists in primary header but file {0} not found. '
                   'Non-polynomial distortion correction will not be '
                   'applied.'.format(fnpol0))
            logger.critical(msg)
            raise IOError("NPOLFILE {0} not found".format(fnpol0))
        try:
            # get NPOLEXT kw from first extension header
            fnpol1 = fname[1].header['NPOLEXT']
            fnpol1 = fileutil.osfn(fnpol1)
            if fnpol1 and fileutil.findFile(fnpol1):
                if fnpol0 != fnpol1:
                    applyNPOLCorr = True
                else:
                    msg = """\n\tNPOLEXT with the same value as NPOLFILE found in first extension.
                             NPOL correction will not be applied."""
                    logger.info(msg)
                    applyNPOLCorr = False
            else:
                # npl file defined in first extension may not be found
                # but if a valid kw exists in the primary header, non-polynomial
                # distortion correction should be applied.
                applyNPOLCorr = True
        except KeyError:
            # the case of "NPOLFILE" kw present in primary header but "NPOLEXT" missing
            # in first extension header
            applyNPOLCorr = True
    except KeyError:
        logger.info('\n\t"NPOLFILE" keyword not found in primary header')
        applyNPOLCorr = False
        return applyNPOLCorr

    # Old-style (pre-WCSDVARR) DGEO files cannot be attached as lookup tables.
    if isOldStyleDGEO(fname, fnpol0):
        applyNPOLCorr = False
    return (applyNPOLCorr and unpolcorr)
Ejemplo n.º 2
0
def applyNpolCorr(fname, unpolcorr):
    """
    Decide whether non-polynomial distortion lookup tables should be added
    as 'WCSDVARR' extensions to the science file.

    The decision is based on the 'NPOLFILE' keyword in the primary header
    and the 'NPOLEXT' keyword in the first extension: when the two differ
    and the primary-header file exists on disk and is a 'new type' npolfile,
    the lookup tables are (re)written.  This is a default correction and
    will always run in the pipeline.  The file used to generate the
    extensions is recorded in the NPOLEXT keyword in the first science
    extension.
    """
    # No 'NPOLFILE' in the primary header means nothing to apply.
    try:
        primary_npol = fits.getval(fname, 'NPOLFILE')
    except KeyError:
        logger.info('\n\t"NPOLFILE" keyword not found in primary header')
        return False

    if primary_npol == 'N/A':
        utils.remove_distortion(fname, "NPOLFILE")
        return False

    primary_npol = fileutil.osfn(primary_npol)
    if not fileutil.findFile(primary_npol):
        msg = """\n\tKw "NPOLFILE" exists in primary header but file %s not found
                      Non-polynomial distortion correction will not be applied\n
                    """ % primary_npol
        logger.critical(msg)
        raise IOError("NPOLFILE {0} not found".format(primary_npol))

    apply_corr = True
    try:
        ext_npol = fits.getval(fname, 'NPOLEXT', ext=1)
    except KeyError:
        # 'NPOLFILE' present in the primary header but 'NPOLEXT' missing
        # from the first extension header: the correction applies.
        apply_corr = True
    else:
        ext_npol = fileutil.osfn(ext_npol)
        if not (ext_npol and fileutil.findFile(ext_npol)):
            # The npl file recorded in the first extension may be gone, but
            # a valid keyword in the primary header still mandates applying
            # the non-polynomial distortion correction.
            apply_corr = True
        elif primary_npol == ext_npol:
            msg = """\n\tNPOLEXT with the same value as NPOLFILE found in first extension.
                             NPOL correction will not be applied."""
            logger.info(msg)
            apply_corr = False
        else:
            apply_corr = True

    if isOldStyleDGEO(fname, primary_npol):
        apply_corr = False
    return (apply_corr and unpolcorr)
Ejemplo n.º 3
0
def checkPA_V3(fnames):
    """
    Check that every input file carries the 'PA_V3' keyword in its primary
    header, recovering the value from the associated '_spt.fits' file when
    possible.

    Parameters
    ----------
    fnames : list
        File names (str) or already-open `~astropy.io.fits.HDUList` objects.
        String entries are opened (and closed) here; the PA_V3 value found
        in the SPT file is copied into the primary header in memory.

    Returns
    -------
    removed_files : list
        Names of files for which no 'PA_V3' value could be recovered and
        which should therefore be dropped from the input list.
    """
    removed_files = []
    for f in fnames:
        toclose = False
        if isinstance(f, str):
            f = fits.open(f)
            toclose = True
        try:
            pav3 = f[0].header['PA_V3']
        except KeyError:
            rootname = f[0].header['ROOTNAME']
            sptfile = rootname + '_spt.fits'
            if fileutil.findFile(sptfile):
                try:
                    pav3 = fits.getval(sptfile, 'PA_V3')
                except KeyError:
                    print("Warning:  Files without keyword PA_V3 detected")
                    removed_files.append(f.filename() or "")
                else:
                    # BUG FIX: only update the header when PA_V3 was actually
                    # retrieved; previously this assignment ran even when the
                    # SPT file lacked the keyword, so `pav3` was unbound and
                    # a NameError was raised.
                    f[0].header['PA_V3'] = pav3
            else:
                print("Warning:  Files without keyword PA_V3 detected")
                removed_files.append(f.filename() or "")
        if toclose:
            f.close()
    if removed_files != []:
        print("Warning:  Removing the following files from input list")
        for f in removed_files:
            print('\t', f)
    return removed_files
Ejemplo n.º 4
0
def checkPA_V3(fnames):
    """
    Check that every input file carries the 'PA_V3' keyword in its primary
    header, recovering the value from the associated '_spt.fits' file when
    possible.

    Parameters
    ----------
    fnames : list
        File names (str) or already-open `~astropy.io.fits.HDUList` objects.

    Returns
    -------
    removed_files : list
        Names of files for which no 'PA_V3' value could be recovered and
        which should therefore be dropped from the input list.
    """
    removed_files = []
    for f in fnames:
        toclose = False
        if isinstance(f, str):
            f = fits.open(f)
            toclose = True
        try:
            pav3 = f[0].header['PA_V3']
        except KeyError:
            rootname = f[0].header['ROOTNAME']
            sptfile = rootname+'_spt.fits'
            if fileutil.findFile(sptfile):
                try:
                    pav3 = fits.getval(sptfile, 'PA_V3')
                except KeyError:
                    print("Warning:  Files without keyword PA_V3 detected")
                    removed_files.append(f.filename() or "")
                else:
                    # BUG FIX: write the keyword only when a value was
                    # retrieved; previously `pav3` could be unbound here,
                    # raising NameError when the SPT file lacked PA_V3.
                    f[0].header['PA_V3'] = pav3
            else:
                print("Warning:  Files without keyword PA_V3 detected")
                removed_files.append(f.filename() or "")
        if toclose:
            f.close()
    if removed_files != []:
        print("Warning:  Removing the following files from input list")
        for f in removed_files:
            print('\t',f)
    return removed_files
Ejemplo n.º 5
0
def _getExposure(img, output, frame, idckey):
    """
    Build a PyDrizzle Exposure object for either the input or the output
    frame and return its WCS.

    Parameters
    ----------
    img : str
        Input image name, optionally with a FITS extension suffix.
    output : str
        Output image name, optionally with a FITS extension suffix.
    frame : str
        'input' to use the input image's linear WCS; anything else uses the
        output image's WCS.
    idckey : str
        IDCTAB keyword passed through to `pydrizzle.Exposure`.

    Returns
    -------
    _wcs
        The WCS of the selected exposure.
    """
    # setup Exposure object with distortion model and WCS info
    if frame == 'input':
        if img.find('[') < 0:
            img += '[sci]'
            _val = fileutil.getKeyword(img, 'CD1_1')
            if _val is None:
                # BUG FIX: the original `img[:-7] += '[0]'` attempted an
                # augmented assignment to a string slice, which raises
                # TypeError.  Strip the '[sci]' suffix just appended (5
                # chars) and fall back to extension 0 instead.
                img = img[:-5] + '[0]'

        if fileutil.getKeyword(img, 'CD1_1') is None:
            print("Input %s not valid!" % img)
            raise Exception

        _exp = pydrizzle.Exposure(img, idckey)
        _wcs = _exp.geometry.wcslin

    else:
        # Use output frame for converting pixel shifts
        # to units of arcseconds...
        # Make sure we have a recognized file:
        if not fileutil.findFile(output):
            if fileutil.findFile(output + '.fits'):
                output += '.fits'
            else:
                print("Can NOT find output file %s!" % output)
                raise Exception

        if output.find('[') < 0:
            output += '[0]'
            _val = fileutil.getKeyword(output, 'CD1_1')
            if _val is None:
                # BUG FIX: same slice-assignment problem as above; replace
                # the '[0]' suffix (3 chars) with '[sci,1]'.
                output = output[:-3] + '[sci,1]'

        if fileutil.getKeyword(output, 'CD1_1') is None:
            print("Output %s not valid!" % output)
            raise Exception

        _exp = pydrizzle.Exposure(output, idckey)
        _wcs = _exp.geometry.wcs

    return _wcs
Ejemplo n.º 6
0
def applyD2ImCorr(fname, d2imcorr):
    """
    Decide whether the detector-to-image (D2IM) correction should be applied.

    Parameters
    ----------
    fname : str
        Name of the input FITS science file.
    d2imcorr : bool
        Flag indicating whether D2IM was requested (currently not consulted
        here; kept for interface compatibility with the caller).

    Returns
    -------
    applyD2IMCorr : bool
        True if the correction should be applied.

    Raises
    ------
    IOError
        If 'D2IMFILE' names a file that does not exist on disk.
    """
    applyD2IMCorr = True
    try:
        # get D2IMFILE kw from primary header
        fd2im0 = fits.getval(fname, 'D2IMFILE')
        if fd2im0 == 'N/A':
            utils.remove_distortion(fname, "D2IMFILE")
            return False
        fd2im0 = fileutil.osfn(fd2im0)
        if not fileutil.findFile(fd2im0):
            msg = """\n\tKw D2IMFILE exists in primary header but file %s not found\n
                     Detector to image correction will not be applied\n""" % fd2im0
            logger.critical(msg)
            print(msg)
            raise IOError("D2IMFILE {0} not found".format(fd2im0))
        try:
            # get D2IMEXT kw from first extension header
            fd2imext = fits.getval(fname, 'D2IMEXT', ext=1)
            fd2imext = fileutil.osfn(fd2imext)
            if fd2imext and fileutil.findFile(fd2imext):
                if fd2im0 != fd2imext:
                    applyD2IMCorr = True
                else:
                    applyD2IMCorr = False
            else:
                # D2IM file defined in first extension may not be found
                # but if a valid kw exists in the primary header,
                # detector to image correction should be applied.
                applyD2IMCorr = True
        except KeyError:
            # the case of D2IMFILE kw present in primary header but D2IMEXT missing
            # in first extension header
            applyD2IMCorr = True
    except KeyError:
        print('D2IMFILE keyword not found in primary header')
        applyD2IMCorr = False
        return applyD2IMCorr
    # BUG FIX: previously the function fell off the end on the success path
    # and implicitly returned None; return the computed flag instead.
    return applyD2IMCorr
Ejemplo n.º 7
0
def applyD2ImCorr(fname, d2imcorr):
    """
    Decide whether the detector-to-image (D2IM) correction should be applied.

    Parameters
    ----------
    fname : str
        Name of the input FITS science file.
    d2imcorr : bool
        Flag indicating whether D2IM was requested (currently not consulted
        here; kept for interface compatibility with the caller).

    Returns
    -------
    applyD2IMCorr : bool
        True if the correction should be applied.

    Raises
    ------
    IOError
        If 'D2IMFILE' names a file that does not exist on disk.
    """
    applyD2IMCorr = True
    try:
        # get D2IMFILE kw from primary header
        fd2im0 = fits.getval(fname, 'D2IMFILE')
        if fd2im0 == 'N/A':
            utils.remove_distortion(fname, "D2IMFILE")
            return False
        fd2im0 = fileutil.osfn(fd2im0)
        if not fileutil.findFile(fd2im0):
            msg = """\n\tKw D2IMFILE exists in primary header but file %s not found\n
                     Detector to image correction will not be applied\n""" % fd2im0
            logger.critical(msg)
            print(msg)
            raise IOError("D2IMFILE {0} not found".format(fd2im0))
        try:
            # get D2IMEXT kw from first extension header
            fd2imext = fits.getval(fname, 'D2IMEXT', ext=1)
            fd2imext = fileutil.osfn(fd2imext)
            if fd2imext and fileutil.findFile(fd2imext):
                if fd2im0 != fd2imext:
                    applyD2IMCorr = True
                else:
                    applyD2IMCorr = False
            else:
                # D2IM file defined in first extension may not be found
                # but if a valid kw exists in the primary header,
                # detector to image correction should be applied.
                applyD2IMCorr = True
        except KeyError:
            # the case of D2IMFILE kw present in primary header but D2IMEXT missing
            # in first extension header
            applyD2IMCorr = True
    except KeyError:
        print('D2IMFILE keyword not found in primary header')
        applyD2IMCorr = False
        return applyD2IMCorr
    # BUG FIX: previously the function fell off the end on the success path
    # and implicitly returned None; return the computed flag instead.
    return applyD2IMCorr
Ejemplo n.º 8
0
def checkPA_V3(fnames):
    """
    Check that every input file carries the 'PA_V3' keyword in its primary
    header, recovering the value from the associated '_spt.fits' file when
    possible and writing it back into the science file.

    Parameters
    ----------
    fnames : list of str
        Names of the input FITS files.

    Returns
    -------
    removed_files : list
        Names of files for which no 'PA_V3' value could be recovered and
        which should therefore be dropped from the input list.
    """
    removed_files = []
    for f in fnames:
        try:
            pav3 = fits.getval(f, 'PA_V3')
        except KeyError:
            rootname = fits.getval(f, 'ROOTNAME')
            sptfile = rootname+'_spt.fits'
            if fileutil.findFile(sptfile):
                try:
                    pav3 = fits.getval(sptfile, 'PA_V3')
                except KeyError:
                    print("Warning:  Files without keyword PA_V3 detected")
                    removed_files.append(f)
                else:
                    # BUG FIX: write the keyword only when a value was
                    # retrieved; previously `pav3` could be unbound here,
                    # raising NameError when the SPT file lacked PA_V3.
                    fits.setval(f, 'PA_V3', value=pav3)
            else:
                print("Warning:  Files without keyword PA_V3 detected")
                removed_files.append(f)
    if removed_files != []:
        print("Warning:  Removing the following files from input list")
        for f in removed_files:
            print('\t',f)
    return removed_files
Ejemplo n.º 9
0
def checkPA_V3(fnames):
    """
    Check that every input file carries the 'PA_V3' keyword in its primary
    header, recovering the value from the associated '_spt.fits' file when
    possible and writing it back into the science file.

    Parameters
    ----------
    fnames : list of str
        Names of the input FITS files.

    Returns
    -------
    removed_files : list
        Names of files for which no 'PA_V3' value could be recovered and
        which should therefore be dropped from the input list.
    """
    removed_files = []
    for f in fnames:
        try:
            pav3 = fits.getval(f, 'PA_V3')
        except KeyError:
            rootname = fits.getval(f, 'ROOTNAME')
            sptfile = rootname + '_spt.fits'
            if fileutil.findFile(sptfile):
                try:
                    pav3 = fits.getval(sptfile, 'PA_V3')
                except KeyError:
                    print("Warning:  Files without keyword PA_V3 detected")
                    removed_files.append(f)
                else:
                    # BUG FIX: write the keyword only when a value was
                    # retrieved; previously `pav3` could be unbound here,
                    # raising NameError when the SPT file lacked PA_V3.
                    fits.setval(f, 'PA_V3', value=pav3)
            else:
                print("Warning:  Files without keyword PA_V3 detected")
                removed_files.append(f)
    if removed_files != []:
        print("Warning:  Removing the following files from input list")
        for f in removed_files:
            print('\t', f)
    return removed_files
Ejemplo n.º 10
0
def apply_d2im_correction(fname, d2imcorr):
    """
    Logic to decide whether to apply the D2IM correction.

    Parameters
    ----------
    fname : `~astropy.io.fits.HDUList` or str
        Input FITS science file object.
    d2imcorr : bool
        Flag indicating if D2IM is should be enabled if allowed.

    Return
    ------
    applyD2IMCorr : bool
        Flag whether to apply the correction.

    The D2IM correction is applied to a science file if it is in the
    allowed corrections for the instrument. The name of the file
    with the correction is saved in the ``D2IMFILE`` keyword in the
    primary header. When the correction is applied the name of the
    file is saved in the ``D2IMEXT`` keyword in the 1st extension header.

    """
    fname, toclose = _toclose(fname)

    # BUG FIX: previously only the final fall-through path closed a file
    # opened here; every early return (and the IOError raise) leaked the
    # handle.  Use try/finally so the file is closed on all exit paths.
    try:
        if not d2imcorr:
            logger.info("D2IM correction not requested - not applying it.")
            return False
        # get D2IMFILE kw from primary header
        try:
            fd2im0 = fname[0].header['D2IMFILE']
        except KeyError:
            logger.info("D2IMFILE keyword is missing - D2IM correction will not be applied.")
            return False
        if fd2im0 == 'N/A':
            utils.remove_distortion(fname, "D2IMFILE")
            return False
        fd2im0 = fileutil.osfn(fd2im0)
        if not fileutil.findFile(fd2im0):
            message = "D2IMFILE {0} not found.".format(fd2im0)
            logger.critical(message)
            raise IOError(message)
        try:
            # get D2IMEXT kw from first extension header
            fd2imext = fname[1].header['D2IMEXT']
        except KeyError:
            # the case of D2IMFILE kw present in primary header but D2IMEXT
            # missing in first extension header
            return True
        fd2imext = fileutil.osfn(fd2imext)
        if fd2imext and fileutil.findFile(fd2imext):
            # Re-apply only when the recorded extension file differs from
            # the one named in the primary header.
            return fd2im0 != fd2imext
        # D2IM file defined in first extension may not be found
        # but if a valid kw exists in the primary header,
        # detector to image correction should be applied.
        return True
    finally:
        if toclose:
            fname.close()
Ejemplo n.º 11
0
    def writeFITS(self,
                  template,
                  sciarr,
                  whtarr,
                  ctxarr=None,
                  versions=None,
                  overwrite=yes,
                  blend=True,
                  virtual=False,
                  rules_file=None,
                  logfile=None):
        """
        Generate PyFITS objects for each output extension
        using the file given by 'template' for populating
        headers.

        The arrays will have the size specified by 'shape'.

        Parameters
        ----------
        template : str or list of str
            Input file name(s) whose headers seed the output headers
            (passed to ``getTemplates``).
        sciarr : ndarray
            Science array written to the SCI extension (or primary HDU).
        whtarr : ndarray
            Weight array written to the WHT extension / weight product.
        ctxarr : ndarray, optional
            Context array written to the CTX extension / context product.
        versions : dict, optional
            Software version info recorded in the PRIMARY header via
            ``addDrizKeywords`` (keys are used verbatim as reported names).
        overwrite : bool, optional
            When True, delete pre-existing output products; otherwise an
            IOError is raised if an output file already exists.
        blend : bool, optional
            Blend headers from all inputs; forced to False for single
            drizzle products since there is only one input to report.
        virtual : bool, optional
            When True, keep products in memory instead of writing to disk.
        rules_file : str, optional
            Header-blending rules file passed to ``getTemplates``.
        logfile : str, optional
            Name of the processing log file, recorded in DRIZPARS.

        Returns
        -------
        outputFITS : dict
            Maps each output file name to its HDUList (None once the list
            has been written to disk and released).
        """
        if not isinstance(template, list):
            template = [template]

        # Remove (or refuse to clobber) any pre-existing combined product.
        if fileutil.findFile(self.output):
            if overwrite:
                log.info('Deleting previous output product: %s' % self.output)
                fileutil.removeFile(self.output)

            else:
                log.warning('Output file %s already exists and overwrite not '
                            'specified!' % self.output)
                log.error('Quitting... Please remove before resuming '
                          'operations.')
                raise IOError

        # initialize output value for this method
        outputFITS = {}
        # Default value for NEXTEND when 'build'== True
        nextend = 3
        if not self.build:
            # Separate-product mode: clean up any stale WHT/CTX files too.
            nextend = 0
            if self.outweight:
                if overwrite:
                    if fileutil.findFile(self.outweight):
                        log.info('Deleting previous output WHT product: %s' %
                                 self.outweight)
                    fileutil.removeFile(self.outweight)
                else:
                    log.warning('Output file %s already exists and overwrite '
                                'not specified!' % self.outweight)
                    log.error('Quitting... Please remove before resuming '
                              'operations.')
                    raise IOError

            if self.outcontext:
                if overwrite:
                    if fileutil.findFile(self.outcontext):
                        log.info('Deleting previous output CTX product: %s' %
                                 self.outcontext)
                    fileutil.removeFile(self.outcontext)
                else:
                    log.warning('Output file %s already exists and overwrite '
                                'not specified!' % self.outcontext)
                    log.error('Quitting... Please remove before resuming '
                              'operations.')
                    raise IOError

        # Get default headers from multi-extension FITS file
        # If only writing out single drizzle product, blending needs to be
        # forced off as there is only 1 input to report, no blending needed
        if self.single:
            blend = False

        # If input data is not in MEF FITS format, it will return 'None'
        # and those headers will have to be generated from drizzle output
        # file FITS headers.
        # NOTE: These are HEADER objects, not HDUs
        self.fullhdrs, intab = getTemplates(template,
                                            blend=False,
                                            rules_file=rules_file)

        newhdrs, newtab = getTemplates(template,
                                       blend=blend,
                                       rules_file=rules_file)
        if newtab is not None: nextend += 1  # account for new table extn

        prihdr = newhdrs[0]
        scihdr = newhdrs[1]
        errhdr = newhdrs[2]
        dqhdr = newhdrs[3]

        # Setup primary header as an HDU ready for appending to output FITS file
        prihdu = fits.PrimaryHDU(header=prihdr, data=None)

        # Start by updating PRIMARY header keywords...
        prihdu.header.set('EXTEND', value=True, after='NAXIS')
        prihdu.header['NEXTEND'] = nextend
        prihdu.header['FILENAME'] = self.output
        prihdu.header['PROD_VER'] = 'DrizzlePac {}'.format(__version__)
        prihdu.header['DRIZPARS'] = (logfile, "Logfile for processing")

        # Update the ROOTNAME with the new value as well
        _indx = self.output.find('_drz')
        if _indx < 0:
            rootname_val = self.output
        else:
            rootname_val = self.output[:_indx]
        prihdu.header['ROOTNAME'] = rootname_val

        # Get the total exposure time for the image
        # If not calculated by PyDrizzle and passed through
        # the pardict, then leave value from the template image.
        if self.texptime:
            prihdu.header['EXPTIME'] = self.texptime
            prihdu.header.set('TEXPTIME', value=self.texptime, after='EXPTIME')
            prihdu.header['EXPSTART'] = self.expstart
            prihdu.header['EXPEND'] = self.expend

        # Update ASN_MTYPE to reflect the fact that this is a product
        # Currently hard-wired to always output 'PROD-DTH' as MTYPE
        prihdu.header['ASN_MTYP'] = 'PROD-DTH'

        # Update DITHCORR calibration keyword if present
        # Remove when we can modify FITS headers in place...
        if 'DRIZCORR' in prihdu.header:
            prihdu.header['DRIZCORR'] = 'COMPLETE'
        if 'DITHCORR' in prihdu.header:
            prihdu.header['DITHCORR'] = 'COMPLETE'

        prihdu.header['NDRIZIM'] = (len(self.parlist),
                                    'Drizzle, No. images drizzled onto output')

        # Only a subset of these keywords makes sense for the new WCS based
        # transformations. They need to be reviewed to decide what to keep
        # and what to leave out.
        if not self.blot:
            self.addDrizKeywords(prihdu.header, versions)

        if scihdr:
            scihdr.pop('OBJECT', None)

            if 'CCDCHIP' in scihdr: scihdr['CCDCHIP'] = '-999'
            if 'NCOMBINE' in scihdr:
                scihdr['NCOMBINE'] = self.parlist[0]['nimages']

            # If BUNIT keyword was found and reset, then
            bunit_last_kw = self.find_kwupdate_location(scihdr, 'bunit')
            if self.bunit is not None:
                comment_str = "Units of science product"
                if self.bunit.lower()[:5] == 'count':
                    comment_str = "counts * gain = electrons"
                scihdr.set('BUNIT',
                           value=self.bunit,
                           comment=comment_str,
                           after=bunit_last_kw)
            else:
                # check to see whether to update already present BUNIT comment
                if 'bunit' in scihdr and scihdr['bunit'].lower(
                )[:5] == 'count':
                    comment_str = "counts * gain = electrons"
                    scihdr.set('BUNIT',
                               value=scihdr['bunit'],
                               comment=comment_str,
                               after=bunit_last_kw)

            # Add WCS keywords to SCI header
            if self.wcs:
                pre_wcs_kw = self.find_kwupdate_location(scihdr, 'CD1_1')
                addWCSKeywords(self.wcs,
                               scihdr,
                               blot=self.blot,
                               single=self.single,
                               after=pre_wcs_kw)
                # Recompute this after removing distortion kws
                pre_wcs_kw = self.find_kwupdate_location(scihdr, 'CD1_1')

        ##########
        # Now, build the output file
        ##########
        if self.build:
            # --- Multi-extension FITS product: SCI + WHT + CTX in one file ---
            print('-Generating multi-extension output file: ', self.output)
            fo = fits.HDUList()

            # Add primary header to output file...
            fo.append(prihdu)

            if self.single and self.compress:
                hdu = fits.CompImageHDU(data=sciarr,
                                        header=scihdr,
                                        name=EXTLIST[0])
            else:
                hdu = fits.ImageHDU(data=sciarr,
                                    header=scihdr,
                                    name=EXTLIST[0])
            last_kw = self.find_kwupdate_location(scihdr, 'EXTNAME')
            hdu.header.set('EXTNAME', value='SCI', after=last_kw)
            hdu.header.set('EXTVER', value=1, after='EXTNAME')
            fo.append(hdu)

            # Build WHT extension here, if requested...
            if errhdr:
                errhdr['CCDCHIP'] = '-999'

            if self.single and self.compress:
                hdu = fits.CompImageHDU(data=whtarr,
                                        header=errhdr,
                                        name=EXTLIST[1])
            else:
                hdu = fits.ImageHDU(data=whtarr,
                                    header=errhdr,
                                    name=EXTLIST[1])
            last_kw = self.find_kwupdate_location(errhdr, 'EXTNAME')
            hdu.header.set('EXTNAME', value='WHT', after=last_kw)
            hdu.header.set('EXTVER', value=1, after='EXTNAME')
            if self.wcs:
                pre_wcs_kw = self.find_kwupdate_location(hdu.header, 'CD1_1')
                # Update WCS Keywords based on PyDrizzle product's value
                # since 'drizzle' itself doesn't update that keyword.
                addWCSKeywords(self.wcs,
                               hdu.header,
                               blot=self.blot,
                               single=self.single,
                               after=pre_wcs_kw)
            fo.append(hdu)

            # Build CTX extension here
            # If there is only 1 plane, write it out as a 2-D extension
            if self.outcontext:
                if ctxarr.shape[0] == 1:
                    _ctxarr = ctxarr[0]
                else:
                    _ctxarr = ctxarr
            else:
                _ctxarr = None

            if self.single and self.compress:
                hdu = fits.CompImageHDU(data=_ctxarr,
                                        header=dqhdr,
                                        name=EXTLIST[2])
            else:
                hdu = fits.ImageHDU(data=_ctxarr,
                                    header=dqhdr,
                                    name=EXTLIST[2])
            last_kw = self.find_kwupdate_location(dqhdr, 'EXTNAME')
            hdu.header.set('EXTNAME', value='CTX', after=last_kw)
            hdu.header.set('EXTVER', value=1, after='EXTNAME')

            if self.wcs:
                pre_wcs_kw = self.find_kwupdate_location(hdu.header, 'CD1_1')
                # Update WCS Keywords based on PyDrizzle product's value
                # since 'drizzle' itself doesn't update that keyword.
                addWCSKeywords(self.wcs,
                               hdu.header,
                               blot=self.blot,
                               single=self.single,
                               after=pre_wcs_kw)
            fo.append(hdu)

            # remove all alternate WCS solutions from headers of this product
            wcs_functions.removeAllAltWCS(fo, [1])

            # add table of combined header keyword values to FITS file
            if newtab is not None:
                fo.append(newtab)

            if not virtual:
                print('Writing out to disk:', self.output)
                # write out file to disk
                fo.writeto(self.output)
                fo.close()
                del fo, hdu
                fo = None
            # End 'if not virtual'
            outputFITS[self.output] = fo

        else:
            # --- Simple-FITS products: separate SCI, WHT and CTX files ---
            print('-Generating simple FITS output: %s' % self.outdata)

            fo = fits.HDUList()
            hdu_header = prihdu.header.copy()
            del hdu_header['nextend']

            # Append remaining unique header keywords from template DQ
            # header to Primary header...
            if scihdr:
                for _card in scihdr.cards:
                    if _card.keyword not in RESERVED_KEYS and _card.keyword not in hdu_header:
                        hdu_header.append(_card)

            hdu_header['filename'] = self.outdata

            if self.compress:
                hdu = fits.CompImageHDU(data=sciarr, header=hdu_header)
                wcs_ext = [1]
            else:
                hdu = fits.PrimaryHDU(data=sciarr, header=hdu_header)
                wcs_ext = [0]

            # explicitly set EXTEND to FALSE for simple FITS files.
            dim = len(sciarr.shape)
            hdu.header.set('extend', value=False, after='NAXIS%s' % dim)

            # explicitly remove EXTNAME, EXTVER from header
            # since this header may have been used
            # to create a CompImageHDU instance instead of a PrimaryHDU instance
            for kw in ['EXTNAME', 'EXTVER', 'PCOUNT', 'GCOUNT']:
                hdu.header.pop(kw, None)

            hdu.header.set('filetype',
                           'SCI',
                           before='TELESCOP',
                           comment='Type of data in array')

            # Add primary header to output file...
            fo.append(hdu)

            if not self.blot:
                # remove all alternate WCS solutions from headers of this product
                logutil.logging.disable(logutil.logging.INFO)
                wcs_functions.removeAllAltWCS(fo, wcs_ext)
                logutil.logging.disable(logutil.logging.NOTSET)

            # add table of combined header keyword values to FITS file
            if newtab is not None:
                fo.append(newtab)

            if not virtual or "single_sci" in self.outdata:
                print('Writing out image to disk:', self.outdata)
                # write out file to disk
                fo.writeto(self.outdata, overwrite=True)
                del hdu
                if "single_sci" not in self.outdata:
                    del fo
                    fo = None
            # End 'if not virtual'
            outputFITS[self.outdata] = fo

            if self.outweight and whtarr is not None:
                # We need to build new PyFITS objects for each WHT array
                fwht = fits.HDUList()

                if errhdr:
                    errhdr['CCDCHIP'] = '-999'

                if self.compress:
                    hdu = fits.CompImageHDU(data=whtarr, header=prihdu.header)
                else:
                    hdu = fits.PrimaryHDU(data=whtarr, header=prihdu.header)
                # Append remaining unique header keywords from template DQ
                # header to Primary header...
                if errhdr:
                    for _card in errhdr.cards:
                        if _card.keyword not in RESERVED_KEYS and _card.keyword not in hdu.header:
                            hdu.header.append(_card)
                hdu.header['filename'] = self.outweight
                hdu.header['CCDCHIP'] = '-999'

                if self.wcs:
                    pre_wcs_kw = self.find_kwupdate_location(
                        hdu.header, 'CD1_1')
                    # Update WCS Keywords based on PyDrizzle product's value
                    # since 'drizzle' itself doesn't update that keyword.
                    addWCSKeywords(self.wcs,
                                   hdu.header,
                                   blot=self.blot,
                                   single=self.single,
                                   after=pre_wcs_kw)

                # explicitly remove EXTNAME, EXTVER from header
                # since this header may have been used
                # to create a CompImageHDU instance instead of a PrimaryHDU instance
                for kw in ['EXTNAME', 'EXTVER', 'PCOUNT', 'GCOUNT']:
                    hdu.header.pop(kw, None)

                hdu.header.set('filetype',
                               'WHT',
                               before='TELESCOP',
                               comment='Type of data in array')

                # Add primary header to output file...
                fwht.append(hdu)
                # remove all alternate WCS solutions from headers of this product
                wcs_functions.removeAllAltWCS(fwht, wcs_ext)

                if not virtual:
                    print('Writing out image to disk:', self.outweight)
                    fwht.writeto(self.outweight, overwrite=True)
                    del fwht, hdu
                    fwht = None
                # End 'if not virtual'
                outputFITS[self.outweight] = fwht

            # If a context image was specified, build a PyFITS object
            # for it as well...
            if self.outcontext and ctxarr is not None:
                fctx = fits.HDUList()

                # If there is only 1 plane, write it out as a 2-D extension
                if ctxarr.shape[0] == 1:
                    _ctxarr = ctxarr[0]
                else:
                    _ctxarr = ctxarr

                if self.compress:
                    hdu = fits.CompImageHDU(data=_ctxarr, header=prihdu.header)
                else:
                    hdu = fits.PrimaryHDU(data=_ctxarr, header=prihdu.header)
                # Append remaining unique header keywords from template DQ
                # header to Primary header...
                if dqhdr:
                    for _card in dqhdr.cards:
                        if ((_card.keyword not in RESERVED_KEYS)
                                and _card.keyword not in hdu.header):
                            hdu.header.append(_card)
                hdu.header['filename'] = self.outcontext

                if self.wcs:
                    pre_wcs_kw = self.find_kwupdate_location(
                        hdu.header, 'CD1_1')
                    # Update WCS Keywords based on PyDrizzle product's value
                    # since 'drizzle' itself doesn't update that keyword.
                    addWCSKeywords(self.wcs,
                                   hdu.header,
                                   blot=self.blot,
                                   single=self.single,
                                   after=pre_wcs_kw)

                # explicitly remove EXTNAME, EXTVER from header
                # since this header may have been used
                # to create a CompImageHDU instance instead of a PrimaryHDU instance
                for kw in ['EXTNAME', 'EXTVER', 'PCOUNT', 'GCOUNT']:
                    hdu.header.pop(kw, None)

                hdu.header.set('filetype',
                               'CTX',
                               before='TELESCOP',
                               comment='Type of data in array')

                fctx.append(hdu)
                # remove all alternate WCS solutions from headers of this product
                wcs_functions.removeAllAltWCS(fctx, wcs_ext)
                if not virtual:
                    print('Writing out image to disk:', self.outcontext)
                    fctx.writeto(self.outcontext, overwrite=True)
                    del fctx, hdu
                    fctx = None
                # End 'if not virtual'

                outputFITS[self.outcontext] = fctx

        return outputFITS
Ejemplo n.º 12
0
def run_driz(imageObjectList,output_wcs,paramDict,single,build,wcsmap=None):
    """ Perform drizzle operation on input to create output.

    The input parameters originally was a list
    of dictionaries, one for each input, that matches the
    primary parameters for an ``IRAF`` `drizzle` task.

    This method would then loop over all the entries in the
    list and run `drizzle` for each entry.

    Parameters required for input in paramDict:
        build,single,units,wt_scl,pixfrac,kernel,fillval,
        rot,scale,xsh,ysh,blotnx,blotny,outnx,outny,data

    Parameters
    ----------
    imageObjectList : list or imageObject
        Input image object(s); a single object is wrapped in a list.
    output_wcs : object
        WCS describing the output frame (must provide ``_naxis1``/``_naxis2``
        and ``printwcs()``).
    paramDict : dict
        Drizzle parameters (see the list above, plus 'stepsize', 'kernel',
        'num_cores').
    single : bool
        If True, drizzle each input onto its own separate output product.
    build : bool
        If True, multi-extension output products are built.
    wcsmap : callable, optional
        Custom WCS mapping object passed through to `run_driz_img`.
    """
    # Insure that input imageObject is a list
    if not isinstance(imageObjectList, list):
        imageObjectList = [imageObjectList]

    #
    # Setup the versions info dictionary for output to PRIMARY header
    # The keys will be used as the name reported in the header, as-is
    #
    _versions = {'AstroDrizzle': __version__,
                 'PyFITS': util.__fits_version__,
                 'Numpy': util.__numpy_version__}

    # Set sub-sampling rate for drizzling
    log.info('  **Using sub-sampling value of %s for kernel %s' %
             (paramDict['stepsize'], paramDict['kernel']))

    maskval = interpret_maskval(paramDict)

    outwcs = copy.deepcopy(output_wcs)

    # Check for existance of output file.
    # (idiomatic boolean tests instead of '== False'/'== True')
    if (not single) and build and fileutil.findFile(
                                imageObjectList[0].outputNames['outFinal']):
        log.info('Removing previous output product...')
        os.remove(imageObjectList[0].outputNames['outFinal'])

    # print out parameters being used for drizzling
    log.info("Running Drizzle to create output frame with WCS of: ")
    output_wcs.printwcs()

    # Will we be running in parallel?
    pool_size = util.get_pool_size(paramDict.get('num_cores'), len(imageObjectList))
    will_parallel = single and pool_size > 1
    if will_parallel:
        log.info('Executing %d parallel workers' % pool_size)
    elif single:  # not yet an option for final drizzle, msg would confuse
        log.info('Executing serially')

    # Set parameters for each input and run drizzle on it here.
    #
    # Perform drizzling...

    # Total number of chips across all inputs; used to size the context image.
    numctx = sum(img._nmembers for img in imageObjectList)
    _numctx = {'all': numctx}

    # Determine how many chips make up each single image
    for img in imageObjectList:
        for chip in img.returnAllChips(extname=img.scienceExt):
            plsingle = chip.outputNames['outSingle']
            _numctx[plsingle] = _numctx.get(plsingle, 0) + 1

    # Compute how many planes will be needed for the context image.
    # Each context plane is a 32-bit bitmask, so one plane covers 32 inputs.
    # NOTE: kept as int(.../32)+1 rather than floor division so that the
    # degenerate zero-input case still yields 1 plane.
    _nplanes = int((_numctx['all'] - 1) / 32) + 1
    # For single drizzling or when context is turned off,
    # minimize to 1 plane only...
    if single or imageObjectList[0][1].outputNames['outContext'] in [None, '', ' ']:
        _nplanes = 1

    #
    # An image buffer needs to be setup for converting the input
    # arrays (sci and wht) from FITS format to native format
    # with respect to byteorder and byteswapping.
    # This buffer should be reused for each input if possible.
    #
    _outsci = _outwht = _outctx = _hdrlist = None
    if (not single) or \
       (single and (not will_parallel) and (not imageObjectList[0].inmemory)):
        # Note there are four cases/combinations for single drizzle alone here:
        # (not-inmem, serial), (not-inmem, parallel), (inmem, serial), (inmem, parallel)
        _outsci = np.empty((output_wcs._naxis2, output_wcs._naxis1), dtype=np.float32)
        _outsci.fill(maskval)
        _outwht = np.zeros((output_wcs._naxis2, output_wcs._naxis1), dtype=np.float32)
        # initialize context to 3-D array but only pass appropriate plane to drizzle as needed
        _outctx = np.zeros((_nplanes, output_wcs._naxis2, output_wcs._naxis1), dtype=np.int32)
        _hdrlist = []

    # Keep track of how many chips have been processed
    # For single case, this will determine when to close
    # one product and open the next.
    _chipIdx = 0

    # Remember the name of the 1st image that goes into this particular product
    # Insure that the header reports the proper values for the start of the
    # exposure time used to make this; in particular, TIME-OBS and DATE-OBS.
    template = None

    #
    # Work on each image
    #
    subprocs = []
    for img in imageObjectList:

        chiplist = img.returnAllChips(extname=img.scienceExt)

        # How many inputs should go into this product?
        num_in_prod = _numctx['all']
        if single:
            num_in_prod = _numctx[chiplist[0].outputNames['outSingle']]

        # The names of the data files contributing to this product
        fnames = [chip.outputNames['data'] for chip in chiplist]

        if _chipIdx == 0:
            template = fnames
        else:
            template.extend(fnames)

        # Work each image, possibly in parallel
        if will_parallel:
            # use multiprocessing.Manager only if in parallel and in memory
            if img.inmemory:
                manager = multiprocessing.Manager()
                dproxy = manager.dict(img.virtualOutputs)  # copy & wrap it in proxy
                img.virtualOutputs = dproxy

            # parallelize run_driz_img (currently for separate drizzle only)
            p = multiprocessing.Process(target=run_driz_img,
                name='adrizzle.run_driz_img()',  # for err msgs
                args=(img, chiplist, output_wcs, outwcs, template, paramDict,
                      single, num_in_prod, build, _versions, _numctx, _nplanes,
                      _chipIdx, None, None, None, None, wcsmap))
            subprocs.append(p)
        else:
            # serial run_driz_img run (either separate drizzle or final drizzle)
            run_driz_img(img, chiplist, output_wcs, outwcs, template, paramDict,
                         single, num_in_prod, build, _versions, _numctx, _nplanes,
                         _chipIdx, _outsci, _outwht, _outctx, _hdrlist, wcsmap)

        # Increment/reset master chip counter
        _chipIdx += len(chiplist)
        if _chipIdx == num_in_prod:
            _chipIdx = 0

    # do the join if we spawned tasks
    if will_parallel:
        mputil.launch_and_wait(subprocs, pool_size)  # blocks till all done

    # Release the (potentially large) output buffers.
    del _outsci, _outwht, _outctx, _hdrlist
Ejemplo n.º 13
0
def buildShadowMaskImage(rootname,detnum,extnum,maskname,replace=yes,bitvalue=None,binned=1):
    """ Builds mask image from WFPC2 shadow calibrations.

    Parameters
    ----------
    rootname : str
        Name of the input image (GEIS '.c1h' or FITS) whose DQ data is used
        to flag bad pixels in the final mask.
    detnum : str or int
        Value of the 'DETECTOR' keyword; converted to a string if needed.
    extnum : int or str
        Extension number of the DQ array in the input file.
    maskname : str
        Name of the output mask FITS file to create.
    replace : bool
        If true, remove and rebuild any pre-existing mask file.
    bitvalue : int or None
        DQ bit mask used to flag bad pixels; if None, only the shadow
        mask is used.
    binned : int
        Binning factor; any value other than 1 bins the 800x800 shadow
        mask 2x2.

    Returns
    -------
    str or None
        Name of the mask image written out, or None on failure.
    """
    # insure detnum is a string
    if not isinstance(detnum, str):
        detnum = repr(detnum)

    _funcroot = '_func_Shadow_WF'

    # build template shadow mask's filename
    _mask = 'wfpc2_inmask' + detnum + '.fits'

    # If an old version of the maskfile was present, remove it and rebuild it.
    if fileutil.findFile(maskname) and replace:
        fileutil.removeFile(maskname)

    # Derive the DQ filename: use rootname directly when it is FITS,
    # otherwise construct the matching GEIS '.c1h' name.
    _indx = rootname.find('.c1h')
    if _indx < 0:
        _indx = len(rootname)
    if rootname.find('.fits') < 0:
        _dqname = rootname[:_indx] + '.c1h'
    else:
        _dqname = rootname

    # Fall back to the template shadow mask when there is no DQ file
    # or no bitvalue was specified.
    _use_inmask = fileutil.findFile(_dqname) != yes or bitvalue is None

    # Check to see if file exists...
    if _use_inmask and not fileutil.findFile(_mask):
        # If not, create the file.
        # This takes a long time to run, so it should be done
        # only when absolutely necessary...
        try:
            _funcx = _funcroot + detnum + 'x'
            _funcy = _funcroot + detnum + 'y'

            # NOTE: eval() resolves module-internal shadow functions only;
            # detnum comes from header data, not arbitrary user input.
            _xarr = np.clip(np.fromfunction(eval(_funcx), (800, 800)), 0.0, 1.0).astype(np.uint8)
            _yarr = np.clip(np.fromfunction(eval(_funcy), (800, 800)), 0.0, 1.0).astype(np.uint8)
            maskarr = _xarr * _yarr

            if binned != 1:
                print('in buildmask', binned)
                # Bin 2x2: a binned pixel is good only when all four
                # contributing pixels are good.
                bmaskarr = maskarr[::2, ::2]
                bmaskarr *= maskarr[1::2, ::2]
                bmaskarr *= maskarr[::2, 1::2]
                bmaskarr *= maskarr[1::2, 1::2]
                maskarr = bmaskarr.copy()
                del bmaskarr

            # Write out the mask file as simple FITS file
            fmask = pyfits.open(_mask, 'append')
            maskhdu = pyfits.PrimaryHDU(data=maskarr)
            fmask.append(maskhdu)

            # Close files
            fmask.close()
            del fmask
        except Exception:
            # Best-effort: creation of the template mask failed.
            return None

    # Check for existance of input .c1h file for use in making inmask file
    if fileutil.findFile(_dqname) != yes:
        print('DQ file ',_dqname,' NOT found...')
        print('Copying ',_mask,'to ',maskname,' as input mask file.')
        # Now, copy template mask file to output file, if necessary
        fileutil.copyFile(_mask,maskname,replace=yes)
    elif bitvalue is None:
        # If bitvalue was not set, then do not use anything but shadow mask
        fileutil.copyFile(_mask,maskname,replace=yes)
    else:
        #
        # Build full mask based on .c1h and shadow mask
        #
        fdq = fileutil.openImage(_dqname)
        try:
            # Read in DQ array from .c1h and from shadow mask files
            dqarr = fdq[int(extnum)].data

            # Build mask array from DQ array
            dqmaskarr = buildMask(dqarr, bitvalue)

            # Write out the mask file as simple FITS file
            fdqmask = pyfits.open(maskname, 'append')
            maskhdu = pyfits.PrimaryHDU(data=dqmaskarr)
            fdqmask.append(maskhdu)

            # Close files
            fdqmask.close()
            del fdqmask
            fdq.close()
            del fdq

        except Exception:
            fdq.close()
            del fdq
            # Safeguard against leaving behind an incomplete file
            if fileutil.findFile(maskname):
                os.remove(maskname)
            _errstr = "\nWarning: Problem creating DQMASK file for "+rootname+".\n"
            print(_errstr)
            return None

    # Return the name of the mask image written out
    return maskname
Ejemplo n.º 14
0
    def writeFITS(self, template, sciarr, whtarr, ctxarr=None, versions=None, extlist=EXTLIST, overwrite=yes):
        """ Generate PyFITS objects for each output extension
            using the file given by 'template' for populating
            headers.

            The arrays will have the size specified by 'shape'.

            Parameters
            ----------
            template : str
                Name of the (multi-extension FITS or GEIS) file whose headers
                seed the output product headers.
            sciarr : ndarray
                Drizzled science data array.
            whtarr : ndarray
                Weight data array.
            ctxarr : ndarray, optional
                Context image array (3-D; written as 2-D when only one plane).
            versions : dict, optional
                Software version info passed on to `addDrizKeywords`.
            extlist : list
                EXTNAME values for the SCI, WHT and CTX extensions.
            overwrite : bool
                Remove pre-existing output files instead of raising IOError.
        """
        # Remove pre-existing output, or abort when overwrite not requested.
        if fileutil.findFile(self.output):
            if overwrite:
                print('Deleting previous output product: ',self.output)
                fileutil.removeFile(self.output)

            else:
                print('WARNING:  Output file ',self.output,' already exists and overwrite not specified!')
                print('Quitting... Please remove before resuming operations.')
                raise IOError

        # Default value for NEXTEND when 'build'== True
        nextend = 3
        if not self.build:
            nextend = 0
            if self.outweight:
                if overwrite:
                    if fileutil.findFile(self.outweight):
                        print('Deleting previous output WHT product: ',self.outweight)
                        # BUG FIX: only remove when the file actually exists;
                        # previously removeFile() was attempted unconditionally.
                        fileutil.removeFile(self.outweight)
                else:
                    print('WARNING:  Output file ',self.outweight,' already exists and overwrite not specified!')
                    print('Quitting... Please remove before resuming operations.')
                    raise IOError


            if self.outcontext:
                if overwrite:
                    if fileutil.findFile(self.outcontext):
                        print('Deleting previous output CTX product: ',self.outcontext)
                        # BUG FIX: same guard as for the WHT product above.
                        fileutil.removeFile(self.outcontext)
                else:
                    print('WARNING:  Output file ',self.outcontext,' already exists and overwrite not specified!')
                    print('Quitting... Please remove before resuming operations.')
                    raise IOError

        # Get default headers from multi-extension FITS file
        # If input data is not in MEF FITS format, it will return 'None'
        # and those headers will have to be generated from drizzle output
        # file FITS headers.
        # NOTE: These are HEADER objects, not HDUs
        prihdr,scihdr,errhdr,dqhdr = getTemplates(template,extlist)

        if prihdr is None:
            # Use readgeis to get header for use as Primary header.
            _indx = template.find('[')
            if _indx < 0:
                _data = template
            else:
                _data = template[:_indx]

            fpri = readgeis.readgeis(_data)
            prihdr = fpri[0].header.copy()
            fpri.close()
            del fpri


        # Setup primary header as an HDU ready for appending to output FITS file
        prihdu = pyfits.PrimaryHDU(header=prihdr,data=None)

        # Start by updating PRIMARY header keywords...
        prihdu.header.update('EXTEND',pyfits.TRUE,after='NAXIS')
        prihdu.header.update('NEXTEND',nextend)
        prihdu.header.update('FILENAME', self.output)

        # Update the ROOTNAME with the new value as well
        _indx = self.output.find('_drz')
        if _indx < 0:
            prihdu.header.update('ROOTNAME', self.output)
        else:
            prihdu.header.update('ROOTNAME', self.output[:_indx])


        # Get the total exposure time for the image
        # If not calculated by PyDrizzle and passed through
        # the pardict, then leave value from the template image.
        if self.texptime:
            prihdu.header.update('EXPTIME', self.texptime)
            prihdu.header.update('EXPSTART', self.expstart)
            prihdu.header.update('EXPEND', self.expend)

        #Update ASN_MTYPE to reflect the fact that this is a product
        # Currently hard-wired to always output 'PROD-DTH' as MTYPE
        prihdu.header.update('ASN_MTYP', 'PROD-DTH')

        # Update DITHCORR calibration keyword if present
        # Remove when we can modify FITS headers in place...
        if 'DRIZCORR' in prihdu.header:
            prihdu.header.update('DRIZCORR','COMPLETE')
        if 'DITHCORR' in prihdu.header:
            prihdu.header.update('DITHCORR','COMPLETE')

        prihdu.header.update('NDRIZIM',len(self.parlist),
            comment='Drizzle, No. images drizzled onto output')

        self.addDrizKeywords(prihdu.header,versions)

        if scihdr:
            del scihdr['OBJECT']
            if 'CCDCHIP' in scihdr: scihdr.update('CCDCHIP','-999')
            if 'NCOMBINE' in scihdr:
                scihdr.update('NCOMBINE', self.parlist[0]['nimages'])

            # If BUNIT keyword was found and reset, then
            if self.bunit is not None:
                scihdr.update('BUNIT',self.bunit,comment="Units of science product")

            if self.wcs:
                # Update WCS Keywords based on PyDrizzle product's value
                # since 'drizzle' itself doesn't update that keyword.
                scihdr.update('ORIENTAT',self.wcs.orient)
                scihdr.update('CD1_1',self.wcs.cd11)
                scihdr.update('CD1_2',self.wcs.cd12)
                scihdr.update('CD2_1',self.wcs.cd21)
                scihdr.update('CD2_2',self.wcs.cd22)
                scihdr.update('CRVAL1',self.wcs.crval1)
                scihdr.update('CRVAL2',self.wcs.crval2)
                scihdr.update('CRPIX1',self.wcs.crpix1)
                scihdr.update('CRPIX2',self.wcs.crpix2)
                scihdr.update('VAFACTOR',1.0)
                # Remove any reference to TDD correction
                if 'TDDALPHA' in scihdr:
                    del scihdr['TDDALPHA']
                    del scihdr['TDDBETA']
                # Remove '-SIP' from CTYPE for output product
                if scihdr['ctype1'].find('SIP') > -1:
                    scihdr.update('ctype1', scihdr['ctype1'][:-4])
                    scihdr.update('ctype2',scihdr['ctype2'][:-4])
                    # Remove SIP coefficients from DRZ product
                    for k in scihdr.items():
                        if (k[0][:2] in ['A_','B_']) or (k[0][:3] in ['IDC','SCD'] and k[0] != 'IDCTAB') or \
                        (k[0][:6] in ['SCTYPE','SCRVAL','SNAXIS','SCRPIX']):
                            del scihdr[k[0]]
                self.addPhotKeywords(scihdr,prihdu.header)


        ##########
        # Now, build the output file
        ##########
        if self.build:
            print('-Generating multi-extension output file: ',self.output)
            fo = pyfits.HDUList()

            # Add primary header to output file...
            fo.append(prihdu)

            hdu = pyfits.ImageHDU(data=sciarr,header=scihdr,name=extlist[0])
            fo.append(hdu)

            # Build WHT extension here, if requested...
            if errhdr:
                errhdr.update('CCDCHIP','-999')

            hdu = pyfits.ImageHDU(data=whtarr,header=errhdr,name=extlist[1])
            hdu.header.update('EXTVER',1)
            if self.wcs:
                # Update WCS Keywords based on PyDrizzle product's value
                # since 'drizzle' itself doesn't update that keyword.
                hdu.header.update('ORIENTAT',self.wcs.orient)
                hdu.header.update('CD1_1',self.wcs.cd11)
                hdu.header.update('CD1_2',self.wcs.cd12)
                hdu.header.update('CD2_1',self.wcs.cd21)
                hdu.header.update('CD2_2',self.wcs.cd22)
                hdu.header.update('CRVAL1',self.wcs.crval1)
                hdu.header.update('CRVAL2',self.wcs.crval2)
                hdu.header.update('CRPIX1',self.wcs.crpix1)
                hdu.header.update('CRPIX2',self.wcs.crpix2)
                hdu.header.update('VAFACTOR',1.0)

            fo.append(hdu)

            # Build CTX extension here
            # If there is only 1 plane, write it out as a 2-D extension
            # NOTE(review): assumes ctxarr is not None when self.outcontext
            # is set — confirm against callers.
            if self.outcontext:
                if ctxarr.shape[0] == 1:
                    _ctxarr = ctxarr[0]
                else:
                    _ctxarr = ctxarr
            else:
                _ctxarr = None

            hdu = pyfits.ImageHDU(data=_ctxarr,header=dqhdr,name=extlist[2])
            hdu.header.update('EXTVER',1)
            if self.wcs:
                # Update WCS Keywords based on PyDrizzle product's value
                # since 'drizzle' itself doesn't update that keyword.
                hdu.header.update('ORIENTAT',self.wcs.orient)
                hdu.header.update('CD1_1',self.wcs.cd11)
                hdu.header.update('CD1_2',self.wcs.cd12)
                hdu.header.update('CD2_1',self.wcs.cd21)
                hdu.header.update('CD2_2',self.wcs.cd22)
                hdu.header.update('CRVAL1',self.wcs.crval1)
                hdu.header.update('CRVAL2',self.wcs.crval2)
                hdu.header.update('CRPIX1',self.wcs.crpix1)
                hdu.header.update('CRPIX2',self.wcs.crpix2)
                hdu.header.update('VAFACTOR',1.0)


            fo.append(hdu)

            fo.writeto(self.output)
            fo.close()
            del fo, hdu

        else:
            print('-Generating simple FITS output: ',self.outdata)
            fo = pyfits.HDUList()

            hdu = pyfits.PrimaryHDU(data=sciarr, header=prihdu.header)

            # Append remaining unique header keywords from template DQ
            # header to Primary header...
            if scihdr:
                for _card in scihdr.ascard:
                    if (_card.key not in RESERVED_KEYS and
                        _card.key not in hdu.header):
                        hdu.header.ascard.append(_card)
            del hdu.header['PCOUNT']
            del hdu.header['GCOUNT']
            self.addPhotKeywords(hdu.header, prihdu.header)
            hdu.header.update('filename', self.outdata)

            # Add primary header to output file...
            fo.append(hdu)
            fo.writeto(self.outdata)
            del fo,hdu

            # BUG FIX: 'whtarr != None' is an elementwise numpy comparison
            # which raises ValueError in a boolean context; use 'is not None'.
            if self.outweight and whtarr is not None:
                # We need to build new PyFITS objects for each WHT array
                fwht = pyfits.HDUList()

                if errhdr:
                    errhdr.update('CCDCHIP','-999')

                hdu = pyfits.PrimaryHDU(data=whtarr, header=prihdu.header)

                # Append remaining unique header keywords from template DQ
                # header to Primary header...
                if errhdr:
                    for _card in errhdr.ascard:
                        if (_card.key not in RESERVED_KEYS and
                            _card.key not in hdu.header):
                            hdu.header.ascard.append(_card)
                hdu.header.update('filename', self.outweight)
                hdu.header.update('CCDCHIP','-999')
                if self.wcs:
                    # Update WCS Keywords based on PyDrizzle product's value
                    # since 'drizzle' itself doesn't update that keyword.
                    hdu.header.update('ORIENTAT',self.wcs.orient)
                    hdu.header.update('CD1_1',self.wcs.cd11)
                    hdu.header.update('CD1_2',self.wcs.cd12)
                    hdu.header.update('CD2_1',self.wcs.cd21)
                    hdu.header.update('CD2_2',self.wcs.cd22)
                    hdu.header.update('CRVAL1',self.wcs.crval1)
                    hdu.header.update('CRVAL2',self.wcs.crval2)
                    hdu.header.update('CRPIX1',self.wcs.crpix1)
                    hdu.header.update('CRPIX2',self.wcs.crpix2)
                    hdu.header.update('VAFACTOR',1.0)

                # Add primary header to output file...
                fwht.append(hdu)
                fwht.writeto(self.outweight)
                del fwht,hdu

            # If a context image was specified, build a PyFITS object
            # for it as well...
            # BUG FIX: same 'is not None' fix as for whtarr above.
            if self.outcontext and ctxarr is not None:
                fctx = pyfits.HDUList()

                # If there is only 1 plane, write it out as a 2-D extension
                if ctxarr.shape[0] == 1:
                    _ctxarr = ctxarr[0]
                else:
                    _ctxarr = ctxarr

                hdu = pyfits.PrimaryHDU(data=_ctxarr, header=prihdu.header)

                # Append remaining unique header keywords from template DQ
                # header to Primary header...
                if dqhdr:
                    for _card in dqhdr.ascard:
                        if (_card.key not in RESERVED_KEYS and
                            _card.key not in hdu.header):
                            hdu.header.ascard.append(_card)
                hdu.header.update('filename', self.outcontext)
                if self.wcs:
                    # Update WCS Keywords based on PyDrizzle product's value
                    # since 'drizzle' itself doesn't update that keyword.
                    hdu.header.update('ORIENTAT',self.wcs.orient)
                    hdu.header.update('CD1_1',self.wcs.cd11)
                    hdu.header.update('CD1_2',self.wcs.cd12)
                    hdu.header.update('CD2_1',self.wcs.cd21)
                    hdu.header.update('CD2_2',self.wcs.cd22)
                    hdu.header.update('CRVAL1',self.wcs.crval1)
                    hdu.header.update('CRVAL2',self.wcs.crval2)
                    hdu.header.update('CRPIX1',self.wcs.crpix1)
                    hdu.header.update('CRPIX2',self.wcs.crpix2)
                    hdu.header.update('VAFACTOR',1.0)

                fctx.append(hdu)
                fctx.writeto(self.outcontext)
                del fctx,hdu
Ejemplo n.º 15
0
def buildShadowMaskImage(dqfile,detnum,extnum,maskname,bitvalue=None,binned=1):
    """ Builds mask image from WFPC2 shadow calibrations.

    Parameters
    ----------
    dqfile : str
        Name of the DQ (.c1h or FITS) file used to flag bad pixels.
    detnum : str or int
        Value of the 'DETECTOR' keyword; converted to a string if needed.
    extnum : int or str
        Extension number of the DQ array in `dqfile`.
    maskname : str
        Name of the output mask FITS file; any existing copy is replaced.
    bitvalue : int or None
        DQ bit mask for flagging bad pixels; if None, only the analytic
        shadow mask is used.
    binned : int
        Binning factor; any value other than 1 bins the 800x800 shadow
        mask 2x2.

    Returns
    -------
    str or None
        Name of the mask image written out, or None on failure.
    """
    # insure detnum is a string
    if not isinstance(detnum, str):
        detnum = repr(detnum)

    _funcroot = '_func_Shadow_WF'

    # If an old version of the maskfile was present, remove it and rebuild it.
    if fileutil.findFile(maskname):
        fileutil.removeFile(maskname)

    # Use the analytic shadow mask when there is no DQ file or no bitvalue
    # was specified (single flag replaces the original duplicated test).
    _use_inmask = not fileutil.findFile(dqfile) or bitvalue is None

    if _use_inmask:
        _mask = maskname
        # Check to see if file exists...
        if not fileutil.findFile(_mask):
            # If not, create the file.
            # This takes a long time to run, so it should be done
            # only when absolutely necessary...
            try:
                _funcx = _funcroot + detnum + 'x'
                _funcy = _funcroot + detnum + 'y'

                # NOTE: eval() resolves module-internal shadow functions only;
                # detnum comes from header data, not arbitrary user input.
                _xarr = np.clip(np.fromfunction(eval(_funcx), (800, 800)), 0.0, 1.0).astype(np.uint8)
                _yarr = np.clip(np.fromfunction(eval(_funcy), (800, 800)), 0.0, 1.0).astype(np.uint8)
                maskarr = _xarr * _yarr

                if binned != 1:
                    # Bin 2x2: a binned pixel is good only when all four
                    # contributing pixels are good.
                    bmaskarr = maskarr[::2, ::2]
                    bmaskarr *= maskarr[1::2, ::2]
                    bmaskarr *= maskarr[::2, 1::2]
                    bmaskarr *= maskarr[1::2, 1::2]
                    maskarr = bmaskarr.copy()
                    del bmaskarr

                # Write out the mask file as simple FITS file
                fmask = fits.open(_mask, 'append')
                maskhdu = fits.PrimaryHDU(data=maskarr)
                fmask.append(maskhdu)

                # Close files
                fmask.close()
                del fmask
            except Exception:
                # Best-effort: creation of the shadow mask failed.
                return None

    else:
        #
        # Build full mask based on .c1h and shadow mask
        #
        fdq = fileutil.openImage(dqfile)
        try:
            # Read in DQ array from .c1h and from shadow mask files
            dqarr = fdq[int(extnum)].data

            # Build mask array from DQ array
            dqmaskarr = buildMask(dqarr, bitvalue)

            # Write out the mask file as simple FITS file
            fdqmask = fits.open(maskname, 'append')
            maskhdu = fits.PrimaryHDU(data=dqmaskarr)
            fdqmask.append(maskhdu)

            # Close files
            fdqmask.close()
            del fdqmask
            fdq.close()
            del fdq

        except Exception:
            fdq.close()
            del fdq
            # Safeguard against leaving behind an incomplete file
            if fileutil.findFile(maskname):
                os.remove(maskname)
            # BUG FIX: original referenced undefined 'rootname' here,
            # which raised NameError instead of printing the warning.
            _errstr = "\nWarning: Problem creating DQMASK file for "+dqfile+".\n"
            print(_errstr)
            return None


    # Return the name of the mask image written out
    return maskname
Ejemplo n.º 16
0
def buildMaskImage(rootname,bitvalue,output,extname='DQ',extver=1):
    """ Builds mask image from rootname's DQ array.

        Parameters
        ----------
        rootname : str
            Name of the input image whose DQ array is used.
        bitvalue : int
            Bit mask passed to ``buildMask`` to flag pixels.
        output : str
            Name of the output mask FITS file; any pre-existing file
            with this name is removed and rebuilt.
        extname : str
            Extension name of the DQ array (default: 'DQ').
        extver : int
            Extension version of the DQ array (default: 1).

        Returns
        -------
        str or None
            Name of the mask image written out, or None on failure.
    """

    # If no bitvalue is set or rootname given, assume no mask is desired
    # However, this name would be useful as the output mask from
    # other processing, such as MultiDrizzle, so return it anyway.
    #if bitvalue == None or rootname == None:
    #    return None

    # build output name
    maskname = output

    # If an old version of the maskfile was present, remove it and rebuild it.
    if fileutil.findFile(maskname):
        fileutil.removeFile(maskname)

    # Open input file with DQ array
    fdq = fileutil.openImage(rootname,memmap=0,mode='readonly')
    try:
        _extn = fileutil.findExtname(fdq,extname,extver=extver)
        # NOTE: use 'is (not) None' here -- '== None' on a numpy array is
        # an elementwise comparison and raises in a truth test.
        if _extn is not None:
            # Read in DQ array
            dqarr = fdq[_extn].data
        else:
            dqarr = None

        # For the case where there is no DQ array,
        # derive the mask from a zero-filled array instead.
        if dqarr is None:
            # We need to get the dimensions of the output DQ array
            # Since the DQ array is non-existent, look for the SCI extension
            _sci_extn = fileutil.findExtname(fdq,'SCI',extver=extver)
            if _sci_extn is not None:
                _shape = fdq[_sci_extn].data.shape
                dqarr = np.zeros(_shape,dtype=np.uint16)
            else:
                raise ValueError("No DQ or SCI extension found in %s" % rootname)
        # Build mask array from DQ array
        maskarr = buildMask(dqarr,bitvalue)
        #Write out the mask file as simple FITS file
        fmask = fits.open(maskname, 'append')
        maskhdu = fits.PrimaryHDU(data = maskarr)
        fmask.append(maskhdu)

        #Close files
        fmask.close()
        del fmask
        fdq.close()
        del fdq

    except Exception:
        # Best-effort cleanup: report the problem and return None rather
        # than propagate, matching the original contract of this function.
        fdq.close()
        del fdq
        # Safeguard against leaving behind an incomplete file
        if fileutil.findFile(maskname):
            os.remove(maskname)
        _errstr = "\nWarning: Problem creating MASK file for "+rootname+".\n"
        #raise IOError, _errstr
        print(_errstr)
        return None

    # Return the name of the mask image written out
    return maskname
Ejemplo n.º 17
0
    def writeFITS(self, template, sciarr, whtarr, ctxarr=None,
                versions=None, overwrite=yes, blend=True, virtual=False):
        """
        Generate PyFITS objects for each output extension
        using the file given by 'template' for populating
        headers.

        The arrays will have the size specified by 'shape'.

        Parameters
        ----------
        template : str or list of str
            Name(s) of FITS file(s) used as header templates.
        sciarr : ndarray
            Science array written to the SCI extension (or simple FITS file).
        whtarr : ndarray
            Weight array written to the WHT extension/file.
        ctxarr : ndarray, optional
            Context array written to the CTX extension/file.
        versions : dict, optional
            Version info recorded by ``addDrizKeywords``.
        overwrite : bool
            If True, delete pre-existing output products; otherwise raise
            IOError when an output already exists.
        blend : bool
            If True, blend input headers into output headers
            (forced off for single-drizzle products).
        virtual : bool
            If True, keep products in memory instead of writing to disk.

        Returns
        -------
        dict
            Mapping of output filename to its HDUList (None when the
            product was written to disk and closed).
        """
        if not isinstance(template, list):
            template = [template]

        if fileutil.findFile(self.output):
            if overwrite:
                log.info('Deleting previous output product: %s' % self.output)
                fileutil.removeFile(self.output)

            else:
                log.warning('Output file %s already exists and overwrite not '
                            'specified!' % self.output)
                log.error('Quitting... Please remove before resuming '
                          'operations.')
                raise IOError

        # initialize output value for this method
        outputFITS = {}
        # Default value for NEXTEND when 'build'== True
        nextend = 3
        if not self.build:
            nextend = 0
            if self.outweight:
                if overwrite:
                    if fileutil.findFile(self.outweight):
                        log.info('Deleting previous output WHT product: %s' %
                                 self.outweight)
                    fileutil.removeFile(self.outweight)
                else:
                    log.warning('Output file %s already exists and overwrite '
                                'not specified!' % self.outweight)
                    log.error('Quitting... Please remove before resuming '
                              'operations.')
                    raise IOError


            if self.outcontext:
                if overwrite:
                    if fileutil.findFile(self.outcontext):
                        log.info('Deleting previous output CTX product: %s' %
                                 self.outcontext)
                    fileutil.removeFile(self.outcontext)
                else:
                    log.warning('Output file %s already exists and overwrite '
                                'not specified!' % self.outcontext)
                    log.error('Quitting... Please remove before resuming '
                              'operations.')
                    raise IOError


        # Get default headers from multi-extension FITS file
        # If only writing out single drizzle product, blending needs to be
        # forced off as there is only 1 input to report, no blending needed
        if self.single:
            blend=False

        # If input data is not in MEF FITS format, it will return 'None'
        # and those headers will have to be generated from drizzle output
        # file FITS headers.
        # NOTE: These are HEADER objects, not HDUs
        #prihdr,scihdr,errhdr,dqhdr = getTemplates(template)
        self.fullhdrs, intab = getTemplates(template, blend=False)

        newhdrs, newtab = getTemplates(template,blend=blend)
        if newtab is not None: nextend += 1 # account for new table extn

        prihdr = newhdrs[0]
        scihdr = newhdrs[1]
        errhdr = newhdrs[2]
        dqhdr = newhdrs[3]

        # Setup primary header as an HDU ready for appending to output FITS file
        prihdu = fits.PrimaryHDU(header=prihdr, data=None)

        # Start by updating PRIMARY header keywords...
        prihdu.header.set('EXTEND', value=True, after='NAXIS')
        prihdu.header['NEXTEND'] = nextend
        prihdu.header['FILENAME'] = self.output
        prihdu.header['PROD_VER'] = 'DrizzlePac {}'.format(version.__version__)

        # Update the ROOTNAME with the new value as well
        _indx = self.output.find('_drz')
        if _indx < 0:
            rootname_val = self.output
        else:
            rootname_val = self.output[:_indx]
        prihdu.header['ROOTNAME'] = rootname_val


        # Get the total exposure time for the image
        # If not calculated by PyDrizzle and passed through
        # the pardict, then leave value from the template image.
        if self.texptime:
            prihdu.header['EXPTIME'] = self.texptime
            prihdu.header.set('TEXPTIME', value=self.texptime, after='EXPTIME')
            prihdu.header['EXPSTART'] = self.expstart
            prihdu.header['EXPEND'] = self.expend

        #Update ASN_MTYPE to reflect the fact that this is a product
        # Currently hard-wired to always output 'PROD-DTH' as MTYPE
        prihdu.header['ASN_MTYP'] = 'PROD-DTH'

        # Update DITHCORR calibration keyword if present
        # Remove when we can modify FITS headers in place...
        if 'DRIZCORR' in prihdu.header:
            prihdu.header['DRIZCORR'] = 'COMPLETE'
        if 'DITHCORR' in prihdu.header:
            prihdu.header['DITHCORR'] = 'COMPLETE'

        prihdu.header['NDRIZIM'] =(len(self.parlist),
                                   'Drizzle, No. images drizzled onto output')

        # Only a subset of these keywords makes sense for the new WCS based
        # transformations. They need to be reviewed to decide what to keep
        # and what to leave out.
        if not self.blot:
            self.addDrizKeywords(prihdu.header,versions)

        if scihdr:
            try:
                del scihdr['OBJECT']
            except KeyError:
                pass

            if 'CCDCHIP' in scihdr: scihdr['CCDCHIP'] = '-999'
            if 'NCOMBINE' in scihdr:
                scihdr['NCOMBINE'] = self.parlist[0]['nimages']

            # If BUNIT keyword was found and reset, then
            bunit_last_kw = self.find_kwupdate_location(scihdr,'bunit')
            if self.bunit is not None:
                comment_str = "Units of science product"
                if self.bunit.lower()[:5] == 'count':
                    comment_str = "counts * gain = electrons"
                scihdr.set('BUNIT', value=self.bunit,
                           comment=comment_str,
                           after=bunit_last_kw)
            else:
                # check to see whether to update already present BUNIT comment
                if 'bunit' in scihdr and scihdr['bunit'].lower()[:5] == 'count':
                    comment_str = "counts * gain = electrons"
                    scihdr.set('BUNIT', value=scihdr['bunit'],
                               comment=comment_str,
                               after=bunit_last_kw)

            # Add WCS keywords to SCI header
            if self.wcs:
                pre_wcs_kw = self.find_kwupdate_location(scihdr,'CD1_1')
                addWCSKeywords(self.wcs,scihdr,blot=self.blot,
                                single=self.single, after=pre_wcs_kw)
                # Recompute this after removing distortion kws
                pre_wcs_kw = self.find_kwupdate_location(scihdr,'CD1_1')

        ##########
        # Now, build the output file
        ##########
        if self.build:
            print('-Generating multi-extension output file: ',self.output)
            fo = fits.HDUList()

            # Add primary header to output file...
            fo.append(prihdu)

            if self.single and self.compress:
                hdu = fits.CompImageHDU(data=sciarr, header=scihdr, name=EXTLIST[0])
            else:
                hdu = fits.ImageHDU(data=sciarr, header=scihdr, name=EXTLIST[0])
            last_kw = self.find_kwupdate_location(scihdr,'EXTNAME')
            hdu.header.set('EXTNAME', value='SCI', after=last_kw)
            hdu.header.set('EXTVER', value=1, after='EXTNAME')
            fo.append(hdu)

            # Build WHT extension here, if requested...
            if errhdr:
                errhdr['CCDCHIP'] = '-999'

            if self.single and self.compress:
                hdu = fits.CompImageHDU(data=whtarr, header=errhdr, name=EXTLIST[1])
            else:
                hdu = fits.ImageHDU(data=whtarr, header=errhdr, name=EXTLIST[1])
            last_kw = self.find_kwupdate_location(errhdr,'EXTNAME')
            hdu.header.set('EXTNAME', value='WHT', after=last_kw)
            hdu.header.set('EXTVER', value=1, after='EXTNAME')
            if self.wcs:
                pre_wcs_kw = self.find_kwupdate_location(hdu.header,'CD1_1')
                # Update WCS Keywords based on PyDrizzle product's value
                # since 'drizzle' itself doesn't update that keyword.
                addWCSKeywords(self.wcs,hdu.header,blot=self.blot,
                               single=self.single, after=pre_wcs_kw)
            fo.append(hdu)

            # Build CTX extension here
            # If there is only 1 plane, write it out as a 2-D extension
            if self.outcontext:
                if ctxarr.shape[0] == 1:
                    _ctxarr = ctxarr[0]
                else:
                    _ctxarr = ctxarr
            else:
                _ctxarr = None

            if self.single and self.compress:
                hdu = fits.CompImageHDU(data=_ctxarr, header=dqhdr, name=EXTLIST[2])
            else:
                hdu = fits.ImageHDU(data=_ctxarr, header=dqhdr, name=EXTLIST[2])
            last_kw = self.find_kwupdate_location(dqhdr,'EXTNAME')
            hdu.header.set('EXTNAME', value='CTX', after=last_kw)
            hdu.header.set('EXTVER', value=1, after='EXTNAME')

            if self.wcs:
                pre_wcs_kw = self.find_kwupdate_location(hdu.header,'CD1_1')
                # Update WCS Keywords based on PyDrizzle product's value
                # since 'drizzle' itself doesn't update that keyword.
                addWCSKeywords(self.wcs,hdu.header,blot=self.blot,
                               single=self.single, after=pre_wcs_kw)
            fo.append(hdu)

            # remove all alternate WCS solutions from headers of this product
            wcs_functions.removeAllAltWCS(fo,[1])

            # add table of combined header keyword values to FITS file
            if newtab is not None:
                fo.append(newtab)

            if not virtual:
                print('Writing out to disk:',self.output)
                # write out file to disk
                fo.writeto(self.output)
                fo.close()
                del fo, hdu
                fo = None
            # End 'if not virtual'
            outputFITS[self.output]= fo

        else:
            print('-Generating simple FITS output: %s' % self.outdata)

            fo = fits.HDUList()
            hdu_header = prihdu.header.copy()
            del hdu_header['nextend']

            # Append remaining unique header keywords from template DQ
            # header to Primary header...
            if scihdr:
                for _card in scihdr.cards:
                    if _card.keyword not in RESERVED_KEYS and _card.keyword not in hdu_header:
                        hdu_header.append(_card)
            # Remove extension-only keywords from the simple-FITS header.
            # BUGFIX: the original did 'del kw', which only deleted the loop
            # variable and left PCOUNT/GCOUNT in the header.
            for kw in ['PCOUNT', 'GCOUNT']:
                try:
                    del hdu_header[kw]
                except KeyError:
                    pass
            hdu_header['filename'] = self.outdata

            if self.compress:
                hdu = fits.CompImageHDU(data=sciarr, header=hdu_header)
                wcs_ext = [1]
            else:
                hdu = fits.ImageHDU(data=sciarr, header=hdu_header)
                wcs_ext = [0]

            # explicitly set EXTEND to FALSE for simple FITS files.
            dim = len(sciarr.shape)
            hdu.header.set('extend', value=False, after='NAXIS%s'%dim)

            # Add primary header to output file...
            fo.append(hdu)

            # remove all alternate WCS solutions from headers of this product
            logutil.logging.disable(logutil.logging.INFO)
            wcs_functions.removeAllAltWCS(fo,wcs_ext)
            logutil.logging.disable(logutil.logging.NOTSET)

            # add table of combined header keyword values to FITS file
            if newtab is not None:
                fo.append(newtab)

            if not virtual:
                print('Writing out image to disk:',self.outdata)
                # write out file to disk
                fo.writeto(self.outdata)
                del fo,hdu
                fo = None
            # End 'if not virtual'
            outputFITS[self.outdata]= fo

            if self.outweight and whtarr is not None:
                # We need to build new PyFITS objects for each WHT array
                fwht = fits.HDUList()

                if errhdr:
                    errhdr['CCDCHIP'] = '-999'

                if self.compress:
                    hdu = fits.CompImageHDU(data=whtarr, header=prihdu.header)
                else:
                    hdu = fits.ImageHDU(data=whtarr, header=prihdu.header)
                # Append remaining unique header keywords from template DQ
                # header to Primary header...
                if errhdr:
                    for _card in errhdr.cards:
                        if _card.keyword not in RESERVED_KEYS and _card.keyword not in hdu.header:
                            hdu.header.append(_card)
                hdu.header['filename'] = self.outweight
                hdu.header['CCDCHIP'] = '-999'
                if self.wcs:
                    pre_wcs_kw = self.find_kwupdate_location(hdu.header,'CD1_1')
                    # Update WCS Keywords based on PyDrizzle product's value
                    # since 'drizzle' itself doesn't update that keyword.
                    addWCSKeywords(self.wcs,hdu.header, blot=self.blot,
                                   single=self.single, after=pre_wcs_kw)

                # Add primary header to output file...
                fwht.append(hdu)
                # remove all alternate WCS solutions from headers of this product
                wcs_functions.removeAllAltWCS(fwht,wcs_ext)

                if not virtual:
                    print('Writing out image to disk:',self.outweight)
                    fwht.writeto(self.outweight)
                    del fwht,hdu
                    fwht = None
                # End 'if not virtual'
                outputFITS[self.outweight]= fwht

            # If a context image was specified, build a PyFITS object
            # for it as well...
            if self.outcontext and ctxarr is not None:
                fctx = fits.HDUList()

                # If there is only 1 plane, write it out as a 2-D extension
                if ctxarr.shape[0] == 1:
                    _ctxarr = ctxarr[0]
                else:
                    _ctxarr = ctxarr

                if self.compress:
                    hdu = fits.CompImageHDU(data=_ctxarr, header=prihdu.header)
                else:
                    hdu = fits.ImageHDU(data=_ctxarr, header=prihdu.header)
                # Append remaining unique header keywords from template DQ
                # header to Primary header...
                if dqhdr:
                    for _card in dqhdr.cards:
                        if ( (_card.keyword not in RESERVED_KEYS) and
                             _card.keyword not in hdu.header):
                            hdu.header.append(_card)
                hdu.header['filename'] = self.outcontext
                if self.wcs:
                    pre_wcs_kw = self.find_kwupdate_location(hdu.header,'CD1_1')
                    # Update WCS Keywords based on PyDrizzle product's value
                    # since 'drizzle' itself doesn't update that keyword.
                    addWCSKeywords(self.wcs,hdu.header, blot=self.blot,
                                   single=self.single, after=pre_wcs_kw)

                fctx.append(hdu)
                # remove all alternate WCS solutions from headers of this product
                wcs_functions.removeAllAltWCS(fctx,wcs_ext)
                if not virtual:
                    print('Writing out image to disk:',self.outcontext)
                    fctx.writeto(self.outcontext)
                    del fctx,hdu
                    fctx = None
                # End 'if not virtual'

                outputFITS[self.outcontext]= fctx

        return outputFITS
Ejemplo n.º 18
0
    def __init__(self, image=None, inimage=None):
        """Initialize drizzle geometry parameters.

        Parameters
        ----------
        image : str or None
            Name of the drizzle product whose header holds the D%03i*
            keywords.  If None, all parameters are set to defaults.
        inimage : str or None
            Name of the input image whose entry is looked up in
            ``image``'s header via the D%03iDATA keywords.

        Raises
        ------
        Exception
            If no matching entry for ``inimage`` is found, if the
            coefficients file is inaccessible, or if secondary
            parameters are present (unsupported).
        """
        # Use identity test, not '==', for the None default.
        if image is None:
            self.scale = 1.0
            self.coeffs = None
            self.lam = 555.0
            self.xsh = 0.0
            self.ysh = 0.0
            self.rot = 0.0
            self.shft_un = "input"
            self.shft_fr = "input"
            self.align = "center"
            self.xgeoim = ""
            self.ygeoim = ""
            self.d2xscale = 0.0
            self.d2yscale = 0.0
            self.d2xsh = 0.0
            self.d2ysh = 0.0
            self.d2rot = 0.0
            self.d2shft_fr = "output"
        else:

            # Read geometric parameters from a header using an image name as
            # the key

            found = FALSE

            # First search for the entry for this image
            i = 1
            while i < MaxImages:
                datkey = 'D%3iDATA' % i
                datkey = datkey.replace(' ', '0')

                iraf.keypar(image, datkey, silent='yes')

                # If we can't read this no point considering
                if iraf.keypar.value == '':
                    break

                # If we have a match set flag and leave
                if iraf.keypar.value == inimage:
                    found = TRUE
                    break

                i += 1

            if not found:
                raise Exception(
                    "Failed to get keyword information from header.")

            # Now we know that the selected image is present we can
            # get all the other parameters - we don't check whether this
            # succeeds, if it doesn't let it crash
            stem = datkey[:4]

            iraf.keypar(image, stem + "SCAL", silent='yes')
            self.scale = float(iraf.keypar.value)

            iraf.keypar(image, stem + "COEF", silent='yes')
            self.coeffs = iraf.keypar.value
            # Check for existence
            if fileutil.findFile(self.coeffs) == FALSE:
                try:
                    print(
                        '\n-Coeffs file not found.  Trying to reproduce them using PyDrizzle...'
                    )
                    # Try to generate the coeffs file automatically
                    indx = inimage.find('[')
                    p = pydrizzle.PyDrizzle(inimage[:indx],
                                            bits_single=None,
                                            bits_final=None,
                                            updatewcs=FALSE)
                    del p
                except Exception:
                    # Narrowed from a bare 'except:'; re-raised with a
                    # clearer message for the caller.
                    print("! Cannot access coefficients file. (", self.coeffs,
                          ")")
                    raise Exception("File missing or inaccessible.")

            iraf.keypar(image, stem + "LAM", silent='yes')
            if iraf.keypar.value != '':
                self.lam = float(iraf.keypar.value)
            else:
                self.lam = 555.0

            iraf.keypar(image, stem + "XSH", silent='yes')
            self.xsh = float(iraf.keypar.value)

            iraf.keypar(image, stem + "YSH", silent='yes')
            self.ysh = float(iraf.keypar.value)

            iraf.keypar(image, stem + "ROT", silent='yes')
            self.rot = float(iraf.keypar.value)

            iraf.keypar(image, stem + "SFTU", silent='yes')
            self.shft_un = iraf.keypar.value

            iraf.keypar(image, stem + "SFTF", silent='yes')
            self.shft_fr = iraf.keypar.value

            iraf.keypar(image, stem + "XGIM", silent='yes')
            self.xgeoim = iraf.keypar.value
            indx = self.xgeoim.find('[')
            # Check for existence
            if fileutil.findFile(
                    self.xgeoim[:indx]) == FALSE and self.xgeoim != '':
                print("! Warning, cannot access X distortion correction image")
                print(" continuing without it. (", self.xgeoim, ")")
                self.xgeoim = ''

            iraf.keypar(image, stem + "YGIM", silent='yes')
            self.ygeoim = iraf.keypar.value
            indx = self.ygeoim.find('[')
            # Check for existence
            if fileutil.findFile(
                    self.ygeoim[:indx]) == FALSE and self.ygeoim != '':
                print("! Warning, cannot access Y distortion correction image")
                print(" continuing without it. (", self.ygeoim, ")")
                self.ygeoim = ''

            # The case of the "align" parameter is more tricky, we
            # have to deduce it from INXC keyword
            iraf.keypar(image, stem + "INXC", silent='yes')
            inxc = float(iraf.keypar.value)

            # Need the X and Y dimensions as well - both input and
            # output
            iraf.keypar(inimage, 'i_naxis1', silent='yes')
            xdim = int(iraf.keypar.value)
            iraf.keypar(inimage, 'i_naxis2', silent='yes')
            ydim = int(iraf.keypar.value)

            self.nxin = xdim
            self.nyin = ydim

            iraf.keypar(image, 'i_naxis1', silent='yes')
            xdim = int(iraf.keypar.value)
            iraf.keypar(image, 'i_naxis2', silent='yes')
            ydim = int(iraf.keypar.value)

            self.nxout = xdim
            self.nyout = ydim

            if abs(inxc - float(xdim / 2) - 0.5) < 1e-4:
                self.align = 'corner'
            else:
                self.align = 'center'

            # Check for the presence of secondary parameters
            iraf.keypar(image, stem + "SECP", silent='yes')
            if iraf.keypar.value == "yes":
                raise Exception(
                    "Sorry, this version does NOT support secondary parameters."
                )
            else:
                self.secp = FALSE
Ejemplo n.º 19
0
def writeSingleFITS(data,wcs,output,template,clobber=True,verbose=True):
    """ Write out a simple FITS file given a numpy array and the name of another
    FITS file to use as a template for the output image header.
    """
    fname, extn_spec = fileutil.parseFilename(output)
    ext_name, ext_ver = fileutil.parseExtn(extn_spec)

    # Deal with any pre-existing file of the same name
    if fileutil.findFile(fname):
        if not clobber:
            log.warning('Output file %s already exists and overwrite not '
                        'specified!' % fname)
            log.error('Quitting... Please remove before resuming operations.')
            raise IOError
        log.info('Deleting previous output product: %s' % fname)
        fileutil.removeFile(fname)

    # Convert the provided WCS (including SIP terms when present)
    # into header cards
    sip_present = hasattr(wcs.sip, 'a_order')
    wcshdr = wcs.wcs2header(sip2hdr=sip_present)

    if template is None:
        prihdr = fits.Header()
        scihdr = fits.Header()
        # Start by updating PRIMARY header keywords...
        prihdr.set('EXTEND', value=True, after='NAXIS')
        prihdr['FILENAME'] = fname
    else:
        # Get default headers from multi-extension FITS file
        # If input data is not in MEF FITS format, it will return 'None'
        # NOTE: These are HEADER objects, not HDUs
        (prihdr, scihdr, errhdr, dqhdr), newtab = getTemplates(template, EXTLIST)

        if scihdr is None:
            scihdr = fits.Header()
            # Locate the start of the INHERIT/EXPNAME section of the
            # primary header; everything from there onward is moved
            # into the science header.
            split_at = 0
            for card in prihdr.cards:
                if card.keyword in ['INHERIT', 'EXPNAME']:
                    break
                split_at += 1
            for i in range(split_at, len(prihdr)):
                scihdr.append(prihdr.cards[i])
            for i in range(split_at, len(prihdr)):
                del prihdr[split_at]

    # Normalize the extension name/version for the science HDU
    if ext_name == '':
        ext_name = 'sci'
    if ext_ver == 0:
        ext_ver = 1
    scihdr['EXTNAME'] = ext_name.upper()
    scihdr['EXTVER'] = ext_ver

    # Fold the WCS cards into the science header
    for card in wcshdr.cards:
        scihdr[card.keyword] = (card.value, card.comment)

    # Assemble the output HDUList and write it out
    outhdu = fits.HDUList([fits.PrimaryHDU(header=prihdr),
                           fits.ImageHDU(header=scihdr, data=data)])
    outhdu.writeto(fname)

    if verbose:
        print('Created output image: %s' % fname)
Ejemplo n.º 20
0
def apply_d2im_correction(fname, d2imcorr):
    """
    Logic to decide whether to apply the D2IM correction.

    Parameters
    ----------
    fname : `~astropy.io.fits.HDUList` or str
        Input FITS science file object.
    d2imcorr : bool
        Flag indicating if D2IM is should be enabled if allowed.

    Return
    ------
    applyD2IMCorr : bool
        Flag whether to apply the correction.

    Raises
    ------
    IOError
        If the file named by ``D2IMFILE`` does not exist on disk.

    The D2IM correction is applied to a science file if it is in the
    allowed corrections for the instrument. The name of the file
    with the correction is saved in the ``D2IMFILE`` keyword in the
    primary header. When the correction is applied the name of the
    file is saved in the ``D2IMEXT`` keyword in the 1st extension header.

    """
    fname, toclose = _toclose(fname)

    # try/finally ensures the file is closed on every exit path; the
    # earlier version leaked the handle on the early 'return' branches
    # when _toclose() had opened the file from a filename.
    try:
        applyD2IMCorr = True
        if not d2imcorr:
            logger.info("D2IM correction not requested - not applying it.")
            return False
        # get D2IMFILE kw from primary header
        try:
            fd2im0 = fname[0].header['D2IMFILE']
        except KeyError:
            logger.info(
                "D2IMFILE keyword is missing - D2IM correction will not be applied."
            )
            return False
        if fd2im0 == 'N/A':
            utils.remove_distortion(fname, "D2IMFILE")
            return False
        fd2im0 = fileutil.osfn(fd2im0)
        if not fileutil.findFile(fd2im0):
            message = "D2IMFILE {0} not found.".format(fd2im0)
            logger.critical(message)
            raise IOError(message)
        try:
            # get D2IMEXT kw from first extension header
            fd2imext = fname[1].header['D2IMEXT']

        except KeyError:
            # the case of D2IMFILE kw present in primary header but D2IMEXT missing
            # in first extension header
            return True
        fd2imext = fileutil.osfn(fd2imext)
        if fd2imext and fileutil.findFile(fd2imext):
            if fd2im0 != fd2imext:
                applyD2IMCorr = True
            else:
                applyD2IMCorr = False
        else:
            # D2IM file defined in first extension may not be found
            # but if a valid kw exists in the primary header,
            # detector to image correction should be applied.
            applyD2IMCorr = True
        return applyD2IMCorr
    finally:
        if toclose:
            fname.close()
Ejemplo n.º 21
0
def buildShadowMaskImage(dqfile,
                         detnum,
                         extnum,
                         maskname,
                         bitvalue=None,
                         binned=1):
    """ Builds mask image from WFPC2 shadow calibrations.

    Parameters
    ----------
    dqfile : str
        Name of the input DQ (.c1h) image.
    detnum : int or str
        Value of the 'DETECTOR' keyword; selects the shadow functions
        ``_func_Shadow_WF<detnum>x`` / ``..y`` evaluated over an
        800x800 grid.
    extnum : int or str
        Extension number of the DQ array in ``dqfile``.
    maskname : str
        Name of the output mask FITS file; any pre-existing file is
        removed and rebuilt.
    bitvalue : int or None
        Bit mask passed to ``buildMask``; when None (or when ``dqfile``
        is missing) only the analytic shadow mask is written.
    binned : int
        Binning factor; when != 1 the shadow mask is binned 2x2.

    Returns
    -------
    str or None
        Name of the mask image written out, or None on failure.
    """
    # insure detnum is a string
    if not isinstance(detnum, str):
        detnum = repr(detnum)

    _funcroot = '_func_Shadow_WF'

    # build template shadow mask's filename

    # If an old version of the maskfile was present, remove it and rebuild it.
    if fileutil.findFile(maskname):
        fileutil.removeFile(maskname)

    _use_inmask = not fileutil.findFile(dqfile) or bitvalue is None

    # Check for existance of input .c1h file for use in making inmask file
    if _use_inmask:
        #_mask = 'wfpc2_inmask'+detnum+'.fits'
        _mask = maskname
        # Check to see if file exists...
        if not fileutil.findFile(_mask):
            # If not, create the file.
            # This takes a long time to run, so it should be done
            # only when absolutely necessary...
            try:
                _funcx = _funcroot + detnum + 'x'
                _funcy = _funcroot + detnum + 'y'

                _xarr = np.clip(np.fromfunction(eval(_funcx), (800, 800)), 0.0,
                                1.0).astype(np.uint8)
                _yarr = np.clip(np.fromfunction(eval(_funcy), (800, 800)), 0.0,
                                1.0).astype(np.uint8)
                maskarr = _xarr * _yarr

                if binned != 1:
                    bmaskarr = maskarr[::2, ::2]
                    bmaskarr *= maskarr[1::2, ::2]
                    bmaskarr *= maskarr[::2, 1::2]
                    bmaskarr *= maskarr[1::2, 1::2]
                    maskarr = bmaskarr.copy()
                    del bmaskarr

                #Write out the mask file as simple FITS file
                fmask = fits.open(_mask, mode='append', memmap=False)
                maskhdu = fits.PrimaryHDU(data=maskarr)
                fmask.append(maskhdu)

                #Close files
                fmask.close()
                del fmask
            except Exception:
                # Narrowed from a bare 'except:'; failure is reported by
                # the None return value, as before.
                return None

    else:
        #
        # Build full mask based on .c1h and shadow mask
        #
        fdq = fileutil.openImage(dqfile, mode='readonly', memmap=False)
        try:
            # Read in DQ array from .c1h and from shadow mask files
            dqarr = fdq[int(extnum)].data
            #maskarr = fsmask[0].data

            # Build mask array from DQ array
            dqmaskarr = buildMask(dqarr, bitvalue)

            #Write out the mask file as simple FITS file
            fdqmask = fits.open(maskname, mode='append', memmap=False)
            maskhdu = fits.PrimaryHDU(data=dqmaskarr)
            fdqmask.append(maskhdu)

            #Close files
            fdqmask.close()
            del fdqmask
            fdq.close()
            del fdq

        except Exception:
            fdq.close()
            del fdq
            # Safeguard against leaving behind an incomplete file
            if fileutil.findFile(maskname):
                os.remove(maskname)
            # BUGFIX: the original referenced an undefined 'rootname' here,
            # raising NameError and masking the real failure.
            _errstr = "\nWarning: Problem creating DQMASK file for " + dqfile + ".\n"
            #raise IOError, _errstr
            print(_errstr)
            return None

    # Return the name of the mask image written out
    return maskname
Ejemplo n.º 22
0
def writeSingleFITS(data,
                    wcs,
                    output,
                    template,
                    clobber=True,
                    verbose=True,
                    rules_file=None):
    """ Write out a simple FITS file given a numpy array and the name of another
    FITS file to use as a template for the output image header.

    Parameters
    ----------
    data : ndarray
        Image array written to the SCI extension.
    wcs : object
        WCS object; its keywords (including SIP terms when present on
        ``wcs.sip``) are written into the SCI header via ``wcs2header``.
    output : str
        Output filename, optionally with an extension spec
        (e.g. ``'out.fits[sci,1]'``); parsed by `fileutil.parseFilename`.
    template : str or None
        Multi-extension FITS file used as a header template; when None,
        minimal PRIMARY/SCI headers are created from scratch.
    clobber : bool
        If True, delete a pre-existing output file; otherwise raise IOError.
    verbose : bool
        Print the name of the created file.
    rules_file : str or None
        Header-rules file passed through to `getTemplates`.

    Raises
    ------
    IOError
        If the output file already exists and `clobber` is False.
    """
    outname, outextn = fileutil.parseFilename(output)
    outextname, outextver = fileutil.parseExtn(outextn)

    if fileutil.findFile(outname):
        if clobber:
            log.info('Deleting previous output product: %s' % outname)
            fileutil.removeFile(outname)

        else:
            log.warning('Output file %s already exists and overwrite not '
                        'specified!' % outname)
            log.error('Quitting... Please remove before resuming operations.')
            # Include the filename so the caller's traceback is actionable.
            raise IOError("Output file %s already exists." % outname)

    # Now update WCS keywords with values from provided WCS; only request
    # SIP keywords when the WCS actually carries SIP coefficients.
    siphdr = hasattr(wcs.sip, 'a_order')
    wcshdr = wcs.wcs2header(sip2hdr=siphdr)

    if template is not None:
        # Get default headers from multi-extension FITS file
        # If input data is not in MEF FITS format, it will return 'None'
        # NOTE: These are HEADER objects, not HDUs
        (prihdr, scihdr, errhdr,
         dqhdr), newtab = getTemplates(template,
                                       EXTLIST,
                                       rules_file=rules_file)

        if scihdr is None:
            # Template had no SCI header: split the primary header at the
            # first INHERIT/EXPNAME card and move everything from there on
            # into a freshly created SCI header.
            scihdr = fits.Header()
            indx = 0
            for c in prihdr.cards:
                if c.keyword not in ['INHERIT', 'EXPNAME']: indx += 1
                else: break
            for i in range(indx, len(prihdr)):
                scihdr.append(prihdr.cards[i])
            # Delete the moved cards; always delete at position `indx`
            # because the header shrinks after each deletion.
            for i in range(indx, len(prihdr)):
                del prihdr[indx]
    else:
        scihdr = fits.Header()
        prihdr = fits.Header()
        # Start by updating PRIMARY header keywords...
        prihdr.set('EXTEND', value=True, after='NAXIS')
        prihdr['FILENAME'] = outname

    # Default the SCI extension name/version when not given in `output`.
    if outextname == '':
        outextname = 'sci'
    if outextver == 0: outextver = 1
    scihdr['EXTNAME'] = outextname.upper()
    scihdr['EXTVER'] = outextver
    scihdr.update(wcshdr)

    # Create PyFITS HDUList for all extensions
    outhdu = fits.HDUList()
    # Setup primary header as an HDU ready for appending to output FITS file
    prihdu = fits.PrimaryHDU(header=prihdr)
    scihdu = fits.ImageHDU(header=scihdr, data=data)

    outhdu.append(prihdu)
    outhdu.append(scihdu)
    outhdu.writeto(outname)

    if verbose:
        print('Created output image: %s' % outname)
Ejemplo n.º 23
0
def buildMaskImage(rootname, bitvalue, output, extname='DQ', extver=1):
    """ Builds mask image from rootname's DQ array
        If there is no valid 'DQ' array in image, then return
        an empty string.

    Parameters
    ----------
    rootname : str
        Name of the input image containing the DQ array.
    bitvalue : int
        Bit mask passed to `buildMask` to select good/bad pixels.
    output : str
        Filename for the output mask image (any existing copy is removed
        and rebuilt).
    extname : str
        Name of the extension holding the DQ array (default: 'DQ').
    extver : int
        Version of the DQ extension to use (default: 1).

    Returns
    -------
    str or None
        Name of the mask image written out, or None if the mask could not
        be created (a warning is printed and any partial file is removed).
    """
    # build output name
    maskname = output

    # If an old version of the maskfile was present, remove it and rebuild it.
    if fileutil.findFile(maskname):
        fileutil.removeFile(maskname)

    # Open input file with DQ array
    fdq = fileutil.openImage(rootname, mode='readonly', memmap=False)
    try:
        _extn = fileutil.findExtname(fdq, extname, extver=extver)
        if _extn is not None:
            # Read in DQ array
            dqarr = fdq[_extn].data
        else:
            dqarr = None

        # For the case where there is no DQ array, synthesize an all-zero
        # DQ array (i.e. no flagged pixels) sized from the SCI extension.
        if dqarr is None:
            # We need to get the dimensions of the output DQ array
            # Since the DQ array is non-existent, look for the SCI extension
            _sci_extn = fileutil.findExtname(fdq, 'SCI', extver=extver)
            if _sci_extn is not None:
                _shape = fdq[_sci_extn].data.shape
                dqarr = np.zeros(_shape, dtype=np.uint16)
            else:
                # Caught by the handler below; message aids debugging.
                raise ValueError("No %s or SCI extension found in %s" %
                                 (extname, rootname))
        # Build mask array from DQ array
        maskarr = buildMask(dqarr, bitvalue)
        # Write out the mask file as a simple FITS file
        fmask = fits.open(maskname, mode='append', memmap=False)
        maskhdu = fits.PrimaryHDU(data=maskarr)
        fmask.append(maskhdu)

        # Close files
        fmask.close()
        del fmask
        fdq.close()
        del fdq

    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
    # still propagate instead of being swallowed as a mask failure.
    except Exception:
        fdq.close()
        del fdq
        # Safeguard against leaving behind an incomplete file
        if fileutil.findFile(maskname):
            os.remove(maskname)
        _errstr = "\nWarning: Problem creating MASK file for " + rootname + ".\n"
        print(_errstr)
        return None

    # Return the name of the mask image written out
    return maskname
Ejemplo n.º 24
0
def run_driz(imageObjectList,
             output_wcs,
             paramDict,
             single,
             build,
             wcsmap=None):
    """ Perform drizzle operation on input to create output.
    The input parameters originally was a list
    of dictionaries, one for each input, that matches the
    primary parameters for an ``IRAF`` `drizzle` task.

    This method would then loop over all the entries in the
    list and run `drizzle` for each entry.

    Parameters required for input in paramDict:
        build,single,units,wt_scl,pixfrac,kernel,fillval,
        rot,scale,xsh,ysh,blotnx,blotny,outnx,outny,data

    Parameters
    ----------
    imageObjectList : object or list
        Input image object(s); a bare object is wrapped into a list.
    output_wcs : object
        WCS of the output frame; its ``array_shape`` sizes the output
        sci/wht/ctx buffers allocated here.
    paramDict : dict
        Drizzle parameters; also read here for 'stepsize', 'kernel' and
        'num_cores'.
    single : bool
        True for the separate (per-input) drizzle step; parallel workers
        are only spawned in this mode.
    build : bool
        True when building a combined output product ('outFinal').
    wcsmap : object or None
        Optional custom WCS mapping passed through to `run_driz_img`.
    """
    # Insure that input imageObject is a list
    if not isinstance(imageObjectList, list):
        imageObjectList = [imageObjectList]

    #
    # Setup the versions info dictionary for output to PRIMARY header
    # The keys will be used as the name reported in the header, as-is
    #
    _versions = {
        'AstroDrizzle': __version__,
        'PyFITS': util.__fits_version__,
        'Numpy': util.__numpy_version__
    }

    # Set sub-sampling rate for drizzling
    # stepsize = 2.0
    log.info('  **Using sub-sampling value of %s for kernel %s' %
             (paramDict['stepsize'], paramDict['kernel']))

    maskval = interpret_maskval(paramDict)

    outwcs = copy.deepcopy(output_wcs)

    # Check for existance of output file.
    if (not single and build
            and fileutil.findFile(imageObjectList[0].outputNames['outFinal'])):
        log.info('Removing previous output product...')
        os.remove(imageObjectList[0].outputNames['outFinal'])

    # print out parameters being used for drizzling
    log.info("Running Drizzle to create output frame with WCS of: ")
    output_wcs.printwcs()

    # Will we be running in parallel?
    # Parallelism is only used for the separate (single) drizzle step.
    pool_size = util.get_pool_size(paramDict.get('num_cores'),
                                   len(imageObjectList))
    run_parallel = single and pool_size > 1
    if run_parallel:
        log.info(f'Executing {pool_size:d} parallel workers')
    else:
        if single:  # not yet an option for final drizzle, msg would confuse
            log.info('Executing serially')

    # Set parameters for each input and run drizzle on it here.
    #
    # Perform drizzling...

    # Total number of chips across all inputs; drives context-plane sizing.
    numctx = 0
    for img in imageObjectList:
        numctx += img._nmembers
    _numctx = {'all': numctx}

    #            if single:
    # Determine how many chips make up each single image
    for img in imageObjectList:
        for chip in img.returnAllChips(extname=img.scienceExt):
            plsingle = chip.outputNames['outSingle']
            if plsingle in _numctx: _numctx[plsingle] += 1
            else: _numctx[plsingle] = 1

    # Compute how many planes will be needed for the context image.
    # Each int32 context plane holds a bitmask for up to 32 inputs.
    _nplanes = int((_numctx['all'] - 1) / 32) + 1
    # For single drizzling or when context is turned off,
    # minimize to 1 plane only...
    if single or imageObjectList[0][1].outputNames['outContext'] in [
            None, '', ' '
    ]:
        _nplanes = 1

    #
    # An image buffer needs to be setup for converting the input
    # arrays (sci and wht) from FITS format to native format
    # with respect to byteorder and byteswapping.
    # This buffer should be reused for each input if possible.
    #
    _outsci = _outwht = _outctx = _hdrlist = None
    if (not single) or \
       (single and (not run_parallel) and (not imageObjectList[0].inmemory)):
        # Note there are four cases/combinations for single drizzle alone here:
        # (not-inmem, serial), (not-inmem, parallel), (inmem, serial), (inmem, parallel)
        # Output science buffer starts filled with the mask/fill value.
        _outsci = np.empty(output_wcs.array_shape, dtype=np.float32)
        _outsci.fill(maskval)
        _outwht = np.zeros(output_wcs.array_shape, dtype=np.float32)
        # initialize context to 3-D array but only pass appropriate plane to drizzle as needed
        _outctx = np.zeros((_nplanes, ) + output_wcs.array_shape,
                           dtype=np.int32)
        _hdrlist = []

    # Keep track of how many chips have been processed
    # For single case, this will determine when to close
    # one product and open the next.
    _chipIdx = 0

    # Remember the name of the 1st image that goes into this particular product
    # Insure that the header reports the proper values for the start of the
    # exposure time used to make this; in particular, TIME-OBS and DATE-OBS.
    template = None

    #
    # Work on each image
    #
    subprocs = []
    for img in imageObjectList:

        chiplist = img.returnAllChips(extname=img.scienceExt)

        # How many inputs should go into this product?
        num_in_prod = _numctx['all']
        if single:
            num_in_prod = _numctx[chiplist[0].outputNames['outSingle']]

        # The name of the 1st image
        fnames = []
        for chip in chiplist:
            fnames.append(chip.outputNames['data'])

        # Accumulate all contributing data names into the header template list.
        if _chipIdx == 0:
            template = fnames
        else:
            template.extend(fnames)

        # Work each image, possibly in parallel
        if run_parallel:
            # use multiprocessing.Manager only if in parallel and in memory
            mp_ctx = multiprocessing.get_context('fork')

            if img.inmemory:
                manager = mp_ctx.Manager()
                dproxy = manager.dict(
                    img.virtualOutputs)  # copy & wrap it in proxy
                img.virtualOutputs = dproxy

            # parallelize run_driz_img (currently for separate drizzle only)
            # NOTE: shared output buffers are passed as None here; each worker
            # allocates its own.
            p = mp_ctx.Process(
                target=run_driz_img,
                name='adrizzle.run_driz_img()',  # for err msgs
                args=(img, chiplist, output_wcs, outwcs, template, paramDict,
                      single, num_in_prod, build, _versions, _numctx, _nplanes,
                      _chipIdx, None, None, None, None, wcsmap))
            subprocs.append(p)
        else:
            # serial run_driz_img run (either separate drizzle or final drizzle)
            run_driz_img(img, chiplist, output_wcs, outwcs, template,
                         paramDict, single, num_in_prod, build, _versions,
                         _numctx, _nplanes, _chipIdx, _outsci, _outwht,
                         _outctx, _hdrlist, wcsmap)

        # Increment/reset master chip counter
        _chipIdx += len(chiplist)
        if _chipIdx == num_in_prod:
            _chipIdx = 0

    # do the join if we spawned tasks
    if run_parallel:
        mputil.launch_and_wait(subprocs, pool_size)  # blocks till all done

    # Release the (potentially very large) output buffers.
    del _outsci, _outwht, _outctx, _hdrlist
Ejemplo n.º 25
0
def run(input,quiet=yes,restore=no,prepend='O', tddcorr=True):
    """ Update (or restore) the WCS keywords of each science extension in
    the input images using the IDCTAB reference file, optionally applying
    the time-dependent distortion (TDD) correction for ACS/WFC.

    Waiver-FITS and GEIS inputs are first converted to multi-extension
    FITS files and the converted files are processed instead.

    Parameters
    ----------
    input : str or list
        Input file specification understood by `parseinput.parseinput`.
    quiet : bool
        Suppress informational messages.
    restore : bool
        When True, restore the archived (prepended) WCS values instead of
        updating them.
    prepend : str
        Prefix character used for the archived WCS keywords.
    tddcorr : bool
        Allow the TDD correction for ACS/WFC when the TDDCORR keyword in
        the primary header is not 'OMIT'.

    Returns
    -------
    list
        The newly created MEF filenames when conversions occurred,
        otherwise the original list of input files.

    Raises
    ------
    IOError
        If no valid input files are found.
    ValueError
        If the instrument is not supported.
    """
    print("+ MAKEWCS Version %s" % __version__)

    _prepend = prepend

    files = parseinput.parseinput(input)[0]
    newfiles = []
    if files == []:
        print("No valid input files found.\n")
        raise IOError("No valid input files found.")

    for image in files:
        # find out what the input is
        imgfits, imgtype = fileutil.isFits(image)

        # Waiver-FITS input is not supported directly; convert it to MEF.
        if imgfits and imgtype == 'waiver':
            newfilename = fileutil.buildNewRootname(image, extn='_c0h.fits')
            # Convert waiver-FITS image to MEF file
            newimage = fileutil.openImage(image, writefits=True,
                                          fitsname=newfilename, clobber=True)
            del newimage
            # Work with new file
            image = newfilename
            newfiles.append(image)
        # If a GEIS image is provided as input, create a new MEF file with
        # a name generated using 'buildFITSName()' and update that new MEF file.
        if not imgfits:
            # Create standardized name for MEF file
            newfilename = fileutil.buildFITSName(image)
            # Convert GEIS image to MEF file
            newimage = fileutil.openImage(image, writefits=True,
                                          fitsname=newfilename, clobber=True)
            del newimage
            # Work with new file
            image = newfilename
            newfiles.append(image)

        if not quiet:
            print("Input files: ",files)

        # First get the name of the IDC table.  Validate the keyword value
        # *before* probing the filesystem so a missing/blank IDCTAB keyword
        # is never fed into findFile().
        idctab = drutil.getIDCFile(image, keyword='idctab')[0]
        if idctab is None or idctab == '':
            print('#\n No IDCTAB specified.  No correction can be done for file %s.Quitting makewcs\n' %image)
            continue
        if not fileutil.findFile(idctab):
            print('#\n IDCTAB: ',idctab,' could not be found. \n')
            print('WCS keywords for file %s will not be updated.\n' %image)
            continue

        _phdu = image + '[0]'
        _instrument = fileutil.getKeyword(_phdu, keyword='INSTRUME')
        # WFPC2 needs a reference chip/extension; other instruments do not.
        if _instrument == 'WFPC2':
            Nrefchip, Nrefext = getNrefchip(image)
        else:
            Nrefchip = None
            Nrefext = None
        if _instrument not in NUM_PER_EXTN:
            raise ValueError("Instrument %s not supported yet. Exiting..." \
                             %_instrument)

        _detector = fileutil.getKeyword(_phdu, keyword='DETECTOR')
        _nimsets = get_numsci(image)

        for i in range(_nimsets):
            # Build a [sci,n] spec for FITS input, [n] for GEIS-style input.
            if image.find('.fits') > 0:
                _img = image + '[sci,' + repr(i + 1) + ']'
            else:
                _img = image + '[' + repr(i + 1) + ']'
            if not restore:
                if not quiet:
                    print('Updating image: ', _img)

                _update(_img, idctab, _nimsets, apply_tdd=False,
                        quiet=quiet, instrument=_instrument, prepend=_prepend,
                        nrchip=Nrefchip, nrext=Nrefext)
                if _instrument == 'ACS' and _detector == 'WFC':
                    tddswitch = fileutil.getKeyword(_phdu, keyword='TDDCORR')
                    # This logic requires that TDDCORR be in the primary header
                    # and set to PERFORM in order to turn this on at all. It can
                    # be turned off by setting either tddcorr=False or setting
                    # the keyword to anything but PERFORM or by deleting the
                    # keyword altogether. PyDrizzle will rely simply on the
                    # values of alpha and beta as computed here to apply the
                    # correction to the coefficients.
                    if (tddcorr and tddswitch != 'OMIT'):
                        print('Applying time-dependent distortion corrections...')
                        _update(_img, idctab, _nimsets, apply_tdd=True,
                                quiet=quiet, instrument=_instrument,
                                prepend=_prepend, nrchip=Nrefchip,
                                nrext=Nrefext)
            else:
                if not quiet:
                    print('Restoring original WCS values for', _img)
                restoreCD(_img, _prepend)

    # Report the converted filenames when any conversions occurred;
    # otherwise hand back the original input list.
    if newfiles == []:
        return files
    else:
        return newfiles