Example #1
def wf2_pydr(filename):
    trl_start = "PYDRIZZLE STARTED------------ %s ------------\n" % time.asctime(
    )

    filename = fileutil.buildRootname(filename, ext=['_c0m.fits', '_c0f.fits'])
    try:
        hdr0 = pyfits.getheader(filename)
    except Exception:
        errormsg = "\n\nCan't find file %s\n" % filename
        raise IOError(errormsg)

    try:
        dodrizcorr = hdr0['DRIZCORR']
    except KeyError:
        print("Keyword DRIZCORR not found\n")
        dodrizcorr = None
    if isSupportedFilter(hdr0) and isSupportedOfftab(hdr0):
        useIDCModel = True
    else:
        useIDCModel = False

    if dodrizcorr is not None:
        if dodrizcorr == 'PERFORM':
            try:
                # update WCS information
                if useIDCModel:
                    p = pydrizzle.PyDrizzle(filename,
                                            bits_single=0,
                                            bits_final=0)
                else:
                    p = pydrizzle.PyDrizzle(filename,
                                            bits_single=0,
                                            bits_final=0,
                                            updatewcs=False,
                                            idckey=None)
                # resample to the scale of chip #3
                # this is done in the pipeline to keep the size of the
                # output drz product small
                chips = [member.chip for member in p.observation.members]
                """
                try:
                    chip_ind = chips.index('3')
                except ValueError:
                    chip_ind = 0
                oscale=p.observation.members[chip_ind].geometry.wcslin.pscale
                """
                # Reset the plate-scale to a fixed value of 0.1 arcsecond/pixel
                # but NOT if the PC was the only chip read out.
                if len(chips) > 1 or (len(chips) == 1 and chips[0] != '1'):
                    skf = pydrizzle.pydrizzle.SkyField()
                    skf.set(psize=0.1)
                    p.resetPars(skf)
                p.run(clean='yes')
                # Clean up any mask files which were created and not deleted by PyDrizzle
                fileutil.removeFile(glob.glob('wfpc2_inmask*.fits'))

            except:
                raise
            #update_header(filename)
            # Update header of output file, not input file
            update_header(p.output)
            trl_end = "END PYDRIZZLE------------ %s ------------\n" % time.asctime(
            )
        else:
            trl_end = 'PYDRIZZLE processing not requested\n'
            trl_end += "END PYDRIZZLE------------ %s ------------\n" % time.asctime(
            )
            return trl_start, trl_end
    else:
        trl_end = 'Keyword DRIZCORR missing from header.\n'
        trl_end += 'Pydrizzle did not run.\n'

    return trl_start, trl_end
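
A minimal usage sketch for wf2_pydr() above; the WFPC2 rootname and trailer filename are hypothetical examples. The function returns the trailer start/end messages rather than writing them, so the caller appends them to the exposure's .tra file.

# Hypothetical usage sketch: rootname and trailer filename are illustrative.
trl_start, trl_end = wf2_pydr('u40x0102m')
with open('u40x0102m.tra', 'a') as ftrl:
    ftrl.write(trl_start)
    ftrl.write(trl_end)
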
Example #2
    def writeFITS(self,
                  template,
                  sciarr,
                  whtarr,
                  ctxarr=None,
                  versions=None,
                  overwrite=yes,
                  blend=True,
                  virtual=False,
                  rules_file=None,
                  logfile=None):
        """
        Generate PyFITS objects for each output extension
        using the file given by 'template' for populating
        headers.

        The arrays will have the size specified by 'shape'.
        """
        if not isinstance(template, list):
            template = [template]

        if fileutil.findFile(self.output):
            if overwrite:
                log.info('Deleting previous output product: %s' % self.output)
                fileutil.removeFile(self.output)

            else:
                log.warning('Output file %s already exists and overwrite not '
                            'specified!' % self.output)
                log.error('Quitting... Please remove before resuming '
                          'operations.')
                raise IOError

        # initialize output value for this method
        outputFITS = {}
        # Default value for NEXTEND when 'build'== True
        nextend = 3
        if not self.build:
            nextend = 0
            if self.outweight:
                if overwrite:
                    if fileutil.findFile(self.outweight):
                        log.info('Deleting previous output WHT product: %s' %
                                 self.outweight)
                    fileutil.removeFile(self.outweight)
                else:
                    log.warning('Output file %s already exists and overwrite '
                                'not specified!' % self.outweight)
                    log.error('Quitting... Please remove before resuming '
                              'operations.')
                    raise IOError

            if self.outcontext:
                if overwrite:
                    if fileutil.findFile(self.outcontext):
                        log.info('Deleting previous output CTX product: %s' %
                                 self.outcontext)
                    fileutil.removeFile(self.outcontext)
                else:
                    log.warning('Output file %s already exists and overwrite '
                                'not specified!' % self.outcontext)
                    log.error('Quitting... Please remove before resuming '
                              'operations.')
                    raise IOError

        # Get default headers from multi-extension FITS file
        # If only writing out single drizzle product, blending needs to be
        # forced off as there is only 1 input to report, no blending needed
        if self.single:
            blend = False

        # If input data is not in MEF FITS format, it will return 'None'
        # and those headers will have to be generated from drizzle output
        # file FITS headers.
        # NOTE: These are HEADER objects, not HDUs
        self.fullhdrs, intab = getTemplates(template,
                                            blend=False,
                                            rules_file=rules_file)

        newhdrs, newtab = getTemplates(template,
                                       blend=blend,
                                       rules_file=rules_file)
        if newtab is not None: nextend += 1  # account for new table extn

        prihdr = newhdrs[0]
        scihdr = newhdrs[1]
        errhdr = newhdrs[2]
        dqhdr = newhdrs[3]

        # Setup primary header as an HDU ready for appending to output FITS file
        prihdu = fits.PrimaryHDU(header=prihdr, data=None)

        # Start by updating PRIMARY header keywords...
        prihdu.header.set('EXTEND', value=True, after='NAXIS')
        prihdu.header['NEXTEND'] = nextend
        prihdu.header['FILENAME'] = self.output
        prihdu.header['PROD_VER'] = 'DrizzlePac {}'.format(__version__)
        prihdu.header['DRIZPARS'] = (logfile, "Logfile for processing")

        # Update the ROOTNAME with the new value as well
        _indx = self.output.find('_drz')
        if _indx < 0:
            rootname_val = self.output
        else:
            rootname_val = self.output[:_indx]
        prihdu.header['ROOTNAME'] = rootname_val

        # Get the total exposure time for the image
        # If not calculated by PyDrizzle and passed through
        # the pardict, then leave value from the template image.
        if self.texptime:
            prihdu.header['EXPTIME'] = self.texptime
            prihdu.header.set('TEXPTIME', value=self.texptime, after='EXPTIME')
            prihdu.header['EXPSTART'] = self.expstart
            prihdu.header['EXPEND'] = self.expend

        # Update ASN_MTYPE to reflect the fact that this is a product
        # Currently hard-wired to always output 'PROD-DTH' as MTYPE
        prihdu.header['ASN_MTYP'] = 'PROD-DTH'

        # Update DITHCORR calibration keyword if present
        # Remove when we can modify FITS headers in place...
        if 'DRIZCORR' in prihdu.header:
            prihdu.header['DRIZCORR'] = 'COMPLETE'
        if 'DITHCORR' in prihdu.header:
            prihdu.header['DITHCORR'] = 'COMPLETE'

        prihdu.header['NDRIZIM'] = (len(self.parlist),
                                    'Drizzle, No. images drizzled onto output')

        # Only a subset of these keywords makes sense for the new WCS based
        # transformations. They need to be reviewed to decide what to keep
        # and what to leave out.
        if not self.blot:
            self.addDrizKeywords(prihdu.header, versions)

        if scihdr:
            scihdr.pop('OBJECT', None)

            if 'CCDCHIP' in scihdr: scihdr['CCDCHIP'] = '-999'
            if 'NCOMBINE' in scihdr:
                scihdr['NCOMBINE'] = self.parlist[0]['nimages']

            # Determine where the BUNIT keyword should be inserted or updated
            bunit_last_kw = self.find_kwupdate_location(scihdr, 'bunit')
            if self.bunit is not None:
                comment_str = "Units of science product"
                if self.bunit.lower()[:5] == 'count':
                    comment_str = "counts * gain = electrons"
                scihdr.set('BUNIT',
                           value=self.bunit,
                           comment=comment_str,
                           after=bunit_last_kw)
            else:
                # check to see whether to update already present BUNIT comment
                if 'bunit' in scihdr and scihdr['bunit'].lower()[:5] == 'count':
                    comment_str = "counts * gain = electrons"
                    scihdr.set('BUNIT',
                               value=scihdr['bunit'],
                               comment=comment_str,
                               after=bunit_last_kw)

            # Add WCS keywords to SCI header
            if self.wcs:
                pre_wcs_kw = self.find_kwupdate_location(scihdr, 'CD1_1')
                addWCSKeywords(self.wcs,
                               scihdr,
                               blot=self.blot,
                               single=self.single,
                               after=pre_wcs_kw)
                # Recompute this after removing distortion kws
                pre_wcs_kw = self.find_kwupdate_location(scihdr, 'CD1_1')

        ##########
        # Now, build the output file
        ##########
        if self.build:
            print('-Generating multi-extension output file: ', self.output)
            fo = fits.HDUList()

            # Add primary header to output file...
            fo.append(prihdu)

            if self.single and self.compress:
                hdu = fits.CompImageHDU(data=sciarr,
                                        header=scihdr,
                                        name=EXTLIST[0])
            else:
                hdu = fits.ImageHDU(data=sciarr,
                                    header=scihdr,
                                    name=EXTLIST[0])
            last_kw = self.find_kwupdate_location(scihdr, 'EXTNAME')
            hdu.header.set('EXTNAME', value='SCI', after=last_kw)
            hdu.header.set('EXTVER', value=1, after='EXTNAME')
            fo.append(hdu)

            # Build WHT extension here, if requested...
            if errhdr:
                errhdr['CCDCHIP'] = '-999'

            if self.single and self.compress:
                hdu = fits.CompImageHDU(data=whtarr,
                                        header=errhdr,
                                        name=EXTLIST[1])
            else:
                hdu = fits.ImageHDU(data=whtarr,
                                    header=errhdr,
                                    name=EXTLIST[1])
            last_kw = self.find_kwupdate_location(errhdr, 'EXTNAME')
            hdu.header.set('EXTNAME', value='WHT', after=last_kw)
            hdu.header.set('EXTVER', value=1, after='EXTNAME')
            if self.wcs:
                pre_wcs_kw = self.find_kwupdate_location(hdu.header, 'CD1_1')
                # Update WCS Keywords based on PyDrizzle product's value
                # since 'drizzle' itself doesn't update that keyword.
                addWCSKeywords(self.wcs,
                               hdu.header,
                               blot=self.blot,
                               single=self.single,
                               after=pre_wcs_kw)
            fo.append(hdu)

            # Build CTX extension here
            # If there is only 1 plane, write it out as a 2-D extension
            if self.outcontext and ctxarr is not None:
                if ctxarr.shape[0] == 1:
                    _ctxarr = ctxarr[0]
                else:
                    _ctxarr = ctxarr
            else:
                _ctxarr = None

            if self.single and self.compress:
                hdu = fits.CompImageHDU(data=_ctxarr,
                                        header=dqhdr,
                                        name=EXTLIST[2])
            else:
                hdu = fits.ImageHDU(data=_ctxarr,
                                    header=dqhdr,
                                    name=EXTLIST[2])
            last_kw = self.find_kwupdate_location(dqhdr, 'EXTNAME')
            hdu.header.set('EXTNAME', value='CTX', after=last_kw)
            hdu.header.set('EXTVER', value=1, after='EXTNAME')

            if self.wcs:
                pre_wcs_kw = self.find_kwupdate_location(hdu.header, 'CD1_1')
                # Update WCS Keywords based on PyDrizzle product's value
                # since 'drizzle' itself doesn't update that keyword.
                addWCSKeywords(self.wcs,
                               hdu.header,
                               blot=self.blot,
                               single=self.single,
                               after=pre_wcs_kw)
            fo.append(hdu)

            # remove all alternate WCS solutions from headers of this product
            wcs_functions.removeAllAltWCS(fo, [1])

            # add table of combined header keyword values to FITS file
            if newtab is not None:
                fo.append(newtab)

            if not virtual:
                print('Writing out to disk:', self.output)
                # write out file to disk
                fo.writeto(self.output)
                fo.close()
                del fo, hdu
                fo = None
            # End 'if not virtual'
            outputFITS[self.output] = fo

        else:
            print('-Generating simple FITS output: %s' % self.outdata)

            fo = fits.HDUList()
            hdu_header = prihdu.header.copy()
            del hdu_header['nextend']

            # Append remaining unique header keywords from template DQ
            # header to Primary header...
            if scihdr:
                for _card in scihdr.cards:
                    if _card.keyword not in RESERVED_KEYS and _card.keyword not in hdu_header:
                        hdu_header.append(_card)

            hdu_header['filename'] = self.outdata

            if self.compress:
                hdu = fits.CompImageHDU(data=sciarr, header=hdu_header)
                wcs_ext = [1]
            else:
                hdu = fits.PrimaryHDU(data=sciarr, header=hdu_header)
                wcs_ext = [0]

            # explicitly set EXTEND to FALSE for simple FITS files.
            dim = len(sciarr.shape)
            hdu.header.set('extend', value=False, after='NAXIS%s' % dim)

            # explicitly remove EXTNAME, EXTVER from header
            # since this header may have been used
            # to create a CompImageHDU instance instead of a PrimaryHDU instance
            for kw in ['EXTNAME', 'EXTVER', 'PCOUNT', 'GCOUNT']:
                hdu.header.pop(kw, None)

            hdu.header.set('filetype',
                           'SCI',
                           before='TELESCOP',
                           comment='Type of data in array')

            # Add primary header to output file...
            fo.append(hdu)

            if not self.blot:
                # remove all alternate WCS solutions from headers of this product
                logutil.logging.disable(logutil.logging.INFO)
                wcs_functions.removeAllAltWCS(fo, wcs_ext)
                logutil.logging.disable(logutil.logging.NOTSET)

            # add table of combined header keyword values to FITS file
            if newtab is not None:
                fo.append(newtab)

            if not virtual or "single_sci" in self.outdata:
                print('Writing out image to disk:', self.outdata)
                # write out file to disk
                fo.writeto(self.outdata, overwrite=True)
                del hdu
                if "single_sci" not in self.outdata:
                    del fo
                    fo = None
            # End 'if not virtual'
            outputFITS[self.outdata] = fo

            if self.outweight and whtarr is not None:
                # We need to build new PyFITS objects for each WHT array
                fwht = fits.HDUList()

                if errhdr:
                    errhdr['CCDCHIP'] = '-999'

                if self.compress:
                    hdu = fits.CompImageHDU(data=whtarr, header=prihdu.header)
                else:
                    hdu = fits.PrimaryHDU(data=whtarr, header=prihdu.header)
                # Append remaining unique header keywords from template DQ
                # header to Primary header...
                if errhdr:
                    for _card in errhdr.cards:
                        if _card.keyword not in RESERVED_KEYS and _card.keyword not in hdu.header:
                            hdu.header.append(_card)
                hdu.header['filename'] = self.outweight
                hdu.header['CCDCHIP'] = '-999'

                if self.wcs:
                    pre_wcs_kw = self.find_kwupdate_location(
                        hdu.header, 'CD1_1')
                    # Update WCS Keywords based on PyDrizzle product's value
                    # since 'drizzle' itself doesn't update that keyword.
                    addWCSKeywords(self.wcs,
                                   hdu.header,
                                   blot=self.blot,
                                   single=self.single,
                                   after=pre_wcs_kw)

                # explicitly remove EXTNAME, EXTVER from header
                # since this header may have been used
                # to create a CompImageHDU instance instead of a PrimaryHDU instance
                for kw in ['EXTNAME', 'EXTVER', 'PCOUNT', 'GCOUNT']:
                    hdu.header.pop(kw, None)

                hdu.header.set('filetype',
                               'WHT',
                               before='TELESCOP',
                               comment='Type of data in array')

                # Add primary header to output file...
                fwht.append(hdu)
                # remove all alternate WCS solutions from headers of this product
                wcs_functions.removeAllAltWCS(fwht, wcs_ext)

                if not virtual:
                    print('Writing out image to disk:', self.outweight)
                    fwht.writeto(self.outweight, overwrite=True)
                    del fwht, hdu
                    fwht = None
                # End 'if not virtual'
                outputFITS[self.outweight] = fwht

            # If a context image was specified, build a PyFITS object
            # for it as well...
            if self.outcontext and ctxarr is not None:
                fctx = fits.HDUList()

                # If there is only 1 plane, write it out as a 2-D extension
                if ctxarr.shape[0] == 1:
                    _ctxarr = ctxarr[0]
                else:
                    _ctxarr = ctxarr

                if self.compress:
                    hdu = fits.CompImageHDU(data=_ctxarr, header=prihdu.header)
                else:
                    hdu = fits.PrimaryHDU(data=_ctxarr, header=prihdu.header)
                # Append remaining unique header keywords from template DQ
                # header to Primary header...
                if dqhdr:
                    for _card in dqhdr.cards:
                        if ((_card.keyword not in RESERVED_KEYS)
                                and _card.keyword not in hdu.header):
                            hdu.header.append(_card)
                hdu.header['filename'] = self.outcontext

                if self.wcs:
                    pre_wcs_kw = self.find_kwupdate_location(
                        hdu.header, 'CD1_1')
                    # Update WCS Keywords based on PyDrizzle product's value
                    # since 'drizzle' itself doesn't update that keyword.
                    addWCSKeywords(self.wcs,
                                   hdu.header,
                                   blot=self.blot,
                                   single=self.single,
                                   after=pre_wcs_kw)

                # explicitly remove EXTNAME, EXTVER from header
                # since this header may have been used
                # to create a CompImageHDU instance instead of a PrimaryHDU instance
                for kw in ['EXTNAME', 'EXTVER', 'PCOUNT', 'GCOUNT']:
                    hdu.header.pop(kw, None)

                hdu.header.set('filetype',
                               'CTX',
                               before='TELESCOP',
                               comment='Type of data in array')

                fctx.append(hdu)
                # remove all alternate WCS solutions from headers of this product
                wcs_functions.removeAllAltWCS(fctx, wcs_ext)
                if not virtual:
                    print('Writing out image to disk:', self.outcontext)
                    fctx.writeto(self.outcontext, overwrite=True)
                    del fctx, hdu
                    fctx = None
                # End 'if not virtual'

                outputFITS[self.outcontext] = fctx

        return outputFITS
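
A hedged sketch of consuming the dictionary returned by writeFITS() when virtual=True: writeto() is skipped and each product's HDUList stays in memory, keyed by its output filename. The object and array names below are illustrative, not part of the source.

# Hypothetical usage sketch: 'drz' stands in for an instance of the class
# defining writeFITS(); sci/wht/ctx are the drizzled numpy arrays.
products = drz.writeFITS(template_list, sci, wht, ctxarr=ctx, virtual=True)
drz_hdulist = products[drz.output]  # astropy.io.fits.HDUList kept in memory
drz_hdulist.info()
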
Example #3
def writeSingleFITS(data,
                    wcs,
                    output,
                    template,
                    clobber=True,
                    verbose=True,
                    rules_file=None):
    """ Write out a simple FITS file given a numpy array and the name of another
    FITS file to use as a template for the output image header.
    """
    outname, outextn = fileutil.parseFilename(output)
    outextname, outextver = fileutil.parseExtn(outextn)

    if fileutil.findFile(outname):
        if clobber:
            log.info('Deleting previous output product: %s' % outname)
            fileutil.removeFile(outname)

        else:
            log.warning('Output file %s already exists and overwrite not '
                        'specified!' % outname)
            log.error('Quitting... Please remove before resuming operations.')
            raise IOError

    # Now update WCS keywords with values from provided WCS
    if hasattr(wcs.sip, 'a_order'):
        siphdr = True
    else:
        siphdr = False
    wcshdr = wcs.wcs2header(sip2hdr=siphdr)

    if template is not None:
        # Get default headers from multi-extension FITS file
        # If input data is not in MEF FITS format, it will return 'None'
        # NOTE: These are HEADER objects, not HDUs
        (prihdr, scihdr, errhdr,
         dqhdr), newtab = getTemplates(template,
                                       EXTLIST,
                                       rules_file=rules_file)

        if scihdr is None:
            scihdr = fits.Header()
            indx = 0
            for c in prihdr.cards:
                if c.keyword not in ['INHERIT', 'EXPNAME']: indx += 1
                else: break
            for i in range(indx, len(prihdr)):
                scihdr.append(prihdr.cards[i])
            for i in range(indx, len(prihdr)):
                del prihdr[indx]
    else:
        scihdr = fits.Header()
        prihdr = fits.Header()
        # Start by updating PRIMARY header keywords...
        prihdr.set('EXTEND', value=True, after='NAXIS')
        prihdr['FILENAME'] = outname

    if outextname == '':
        outextname = 'sci'
    if outextver == 0: outextver = 1
    scihdr['EXTNAME'] = outextname.upper()
    scihdr['EXTVER'] = outextver
    scihdr.update(wcshdr)

    # Create PyFITS HDUList for all extensions
    outhdu = fits.HDUList()
    # Setup primary header as an HDU ready for appending to output FITS file
    prihdu = fits.PrimaryHDU(header=prihdr)
    scihdu = fits.ImageHDU(header=scihdr, data=data)

    outhdu.append(prihdu)
    outhdu.append(scihdu)
    outhdu.writeto(outname)

    if verbose:
        print('Created output image: %s' % outname)
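
A hedged usage sketch for writeSingleFITS(), assuming the wcs argument is an stwcs HSTWCS object (which supplies the .sip attribute and wcs2header() method used above); all filenames are hypothetical.

# Hypothetical usage sketch: write a blotted array to a simple FITS file
# using the input exposure as the header template. Assumes stwcs is
# installed; filenames are illustrative.
import numpy as np
from stwcs import wcsutil

blot_wcs = wcsutil.HSTWCS('j8cw03f6q_flt.fits', ext=1)
blotted = np.zeros((blot_wcs.naxis2, blot_wcs.naxis1), dtype=np.float32)
writeSingleFITS(blotted, blot_wcs, 'j8cw03f6q_blt.fits', 'j8cw03f6q_flt.fits')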

def buildMaskImage(rootname, bitvalue, output, extname='DQ', extver=1):
    """ Builds mask image from rootname's DQ array
        If there is no valid 'DQ' array in image, then return
        an empty string.
    """

    # If no bitvalue is set or rootname given, assume no mask is desired
    # However, this name would be useful as the output mask from
    # other processing, such as MultiDrizzle, so return it anyway.
    #if bitvalue == None or rootname == None:
    #    return None

    # build output name
    maskname = output

    # If an old version of the maskfile was present, remove it and rebuild it.
    if fileutil.findFile(maskname):
        fileutil.removeFile(maskname)

    # Open input file with DQ array
    fdq = fileutil.openImage(rootname, mode='readonly', memmap=False)
    try:
        _extn = fileutil.findExtname(fdq, extname, extver=extver)
        if _extn is not None:
            # Read in DQ array
            dqarr = fdq[_extn].data
        else:
            dqarr = None

        # For the case where there is no DQ array,
        # create a mask image of all ones.
        if dqarr is None:
            # We need to get the dimensions of the output DQ array
            # Since the DQ array is non-existent, look for the SCI extension
            _sci_extn = fileutil.findExtname(fdq, 'SCI', extver=extver)
            if _sci_extn is not None:
                _shape = fdq[_sci_extn].data.shape
                dqarr = np.zeros(_shape, dtype=np.uint16)
            else:
                raise Exception
        # Build mask array from DQ array
        maskarr = buildMask(dqarr, bitvalue)
        # Write out the mask file as simple FITS file
        fmask = fits.open(maskname, mode='append')
        maskhdu = fits.PrimaryHDU(data=maskarr)
        fmask.append(maskhdu)

        #Close files
        fmask.close()
        del fmask
        fdq.close()
        del fdq

    except Exception:
        fdq.close()
        del fdq
        # Safeguard against leaving behind an incomplete file
        if fileutil.findFile(maskname):
            os.remove(maskname)
        _errstr = "\nWarning: Problem creating MASK file for " + rootname + ".\n"
        #raise IOError, _errstr
        print(_errstr)
        return None

    # Return the name of the mask image written out
    return maskname
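
A hedged usage sketch for buildMaskImage(); the DQ bit value and filenames are hypothetical examples.

# Hypothetical usage sketch: mask pixels whose DQ value shares any bit with
# 4096 (cosmic rays flagged by the pipeline); filenames are illustrative.
mask = buildMaskImage('j8cw03f6q_flt.fits', 4096, 'j8cw03f6q_inmask1.fits',
                      extname='DQ', extver=1)
if mask is None:
    print('No mask image could be generated.')
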
def process(inFile,
            force=False,
            newpath=None,
            inmemory=False,
            num_cores=None,
            headerlets=True):
    """ Run astrodrizzle on input file/ASN table
        using default values for astrodrizzle parameters.
    """
    # We only need to import this package if a user runs the task
    import drizzlepac
    from drizzlepac import processInput  # used for creating new ASNs for _flc inputs
    import stwcs

    if headerlets:
        from stwcs.wcsutil import headerlet

    # Open the input file
    try:
        # Make sure given filename is complete and exists...
        inFilename = fileutil.buildRootname(inFile, ext=['.fits'])
        if not os.path.exists(inFilename):
            print("ERROR: Input file - %s - does not exist." % inFilename)
            return
    except TypeError:
        print("ERROR: Inappropriate input file.")
        return

    # If newpath was specified, move all files to that directory for processing
    if newpath:
        orig_processing_dir = os.getcwd()
        new_processing_dir = _createWorkingDir(newpath, inFilename)
        _copyToNewWorkingDir(new_processing_dir, inFilename)
        os.chdir(new_processing_dir)

    # Initialize for later use...
    _mname = None
    _new_asn = None
    _calfiles = []

    # Check input file to see if [DRIZ/DITH]CORR is set to PERFORM
    if '_asn' in inFilename:
        # We are working with an ASN table.
        # Use asnutil code to extract filename
        inFilename = _lowerAsn(inFilename)
        _new_asn = [inFilename]
        _asndict = asnutil.readASNTable(inFilename, None, prodonly=False)
        _cal_prodname = _asndict['output'].lower()
        _fname = fileutil.buildRootname(_cal_prodname, ext=['_drz.fits'])

        # Retrieve the first member's rootname for possible use later
        _fimg = fits.open(inFilename)
        for name in _fimg[1].data.field('MEMNAME'):
            if name[-1] != '*':
                _mname = name.split('\0', 1)[0].lower()
                break
        _fimg.close()
        del _fimg

    else:
        # Check to see if input is a _RAW file
        # If it is, strip off the _raw.fits extension...
        _indx = inFilename.find('_raw')
        if _indx < 0: _indx = len(inFilename)
        # ... and build the CALXXX product rootname.
        _mname = fileutil.buildRootname(inFilename[:_indx])
        _cal_prodname = inFilename[:_indx]
        # Reset inFilename to correspond to appropriate input for
        # drizzle: calibrated product name.
        inFilename = _mname

        if _mname is None:
            errorMsg = 'Could not find calibrated product!'
            raise Exception(errorMsg)

    # Create trailer filenames based on ASN output filename or
    # on input name for single exposures
    if '_raw' in inFile:
        # Output trailer file to RAW file's trailer
        _trlroot = inFile[:inFile.find('_raw')]
    elif '_asn' in inFile:
        # Output trailer file to ASN file's trailer, not product's trailer
        _trlroot = inFile[:inFile.find('_asn')]
    else:
        # Default: trim off the last suffix of the input filename
        # and replace it with '.tra'
        _indx = inFile.rfind('_')
        if _indx > 0:
            _trlroot = inFile[:_indx]
        else:
            _trlroot = inFile

    _trlfile = _trlroot + '.tra'

    # Open product and read keyword value
    # Check to see if product already exists...
    dkey = 'DRIZCORR'
    # ...if product does NOT exist, interrogate input file
    # to find out whether 'dcorr' has been set to PERFORM
    # Check if user wants to process again regardless of DRIZCORR keyword value
    if force: dcorr = 'PERFORM'
    else:
        if _mname:
            _fimg = fits.open(fileutil.buildRootname(_mname,
                                                     ext=['_raw.fits']))
            _phdr = _fimg['PRIMARY'].header
            if dkey in _phdr:
                dcorr = _phdr[dkey]
            else:
                dcorr = None
            _fimg.close()
            del _fimg
        else:
            dcorr = None

    time_str = _getTime()
    _tmptrl = _trlroot + '_tmp.tra'
    _drizfile = _trlroot + '_pydriz'
    _drizlog = _drizfile + ".log"  # the '.log' gets added automatically by astrodrizzle
    if dcorr == 'PERFORM':
        if '_asn.fits' not in inFilename:
            # Working with a singleton
            # However, we always want to make sure we always use
            # a calibrated product as input, if available.
            _infile = fileutil.buildRootname(_cal_prodname)
            _infile_flc = fileutil.buildRootname(_cal_prodname,
                                                 ext=['_flc.fits'])

            _cal_prodname = _infile
            _inlist = _calfiles = [_infile]

            # Add CTE corrected filename as additional input if present
            if os.path.exists(_infile_flc) and _infile_flc != _infile:
                _inlist.append(_infile_flc)

        else:
            # Working with an ASN table...
            _infile = inFilename
            flist, duplist = processInput.checkForDuplicateInputs(
                _asndict['order'])
            _calfiles = flist
            if len(duplist) > 0:
                origasn = processInput.changeSuffixinASN(inFilename, 'flt')
                dupasn = processInput.changeSuffixinASN(inFilename, 'flc')
                _inlist = [origasn, dupasn]
            else:
                _inlist = [_infile]
            # We want to keep the original specification of the calibration
            # product name, though, not a lower-case version...
            _cal_prodname = inFilename
            _new_asn.extend(_inlist)  # kept so we can delete it when finished

        # Run astrodrizzle and send its processing statements to _trlfile
        _pyver = drizzlepac.astrodrizzle.__version__

        for _infile in _inlist:  # Run astrodrizzle for all inputs
            # Create trailer marker message for start of astrodrizzle processing
            _trlmsg = _timestamp('astrodrizzle started ')
            _trlmsg = _trlmsg + __trlmarker__
            _trlmsg = _trlmsg + '%s: Processing %s with astrodrizzle Version %s\n' % (
                time_str, _infile, _pyver)
            print(_trlmsg)

            # Write out trailer comments to trailer file...
            ftmp = open(_tmptrl, 'w')
            ftmp.writelines(_trlmsg)
            ftmp.close()
            _appendTrlFile(_trlfile, _tmptrl)

            _pyd_err = _trlroot + '_pydriz.stderr'

            try:
                b = drizzlepac.astrodrizzle.AstroDrizzle(input=_infile,
                                                         runfile=_drizfile,
                                                         configobj='defaults',
                                                         in_memory=inmemory,
                                                         num_cores=num_cores,
                                                         **pipeline_pars)
            except Exception as errorobj:
                _appendTrlFile(_trlfile, _drizlog)
                _appendTrlFile(_trlfile, _pyd_err)
                _ftrl = open(_trlfile, 'a')
                _ftrl.write(
                    'ERROR: Could not complete astrodrizzle processing of %s.\n'
                    % _infile)
                _ftrl.write(str(sys.exc_info()[0]) + ': ')
                _ftrl.writelines(str(errorobj))
                _ftrl.write('\n')
                _ftrl.close()
                print(
                    'ERROR: Could not complete astrodrizzle processing of %s.'
                    % _infile)
                raise Exception(str(errorobj))

            # Now, append comments created by PyDrizzle to CALXXX trailer file
            print('Updating trailer file %s with astrodrizzle comments.' %
                  _trlfile)
            _appendTrlFile(_trlfile, _drizlog)

        # Save this for when astropy.io.fits can modify a file 'in-place'
        # Update calibration switch
        _fimg = fits.open(_cal_prodname, mode='update')
        _fimg['PRIMARY'].header[dkey] = 'COMPLETE'
        _fimg.close()
        del _fimg

        # Enforce pipeline convention of all lower-case product
        # names
        _prodlist = glob.glob('*drz.fits')
        for _prodname in _prodlist:
            _plower = _prodname.lower()
            if _prodname != _plower: os.rename(_prodname, _plower)

    else:
        # Create default trailer file messages when astrodrizzle is not
        # run on a file.  This will typically apply only to BIAS,DARK
        # and other reference images.
        # Start by building up the message...
        _trlmsg = _timestamp('astrodrizzle skipped ')
        _trlmsg = _trlmsg + __trlmarker__
        _trlmsg = _trlmsg + '%s: astrodrizzle processing not requested for %s.\n' % (
            time_str, inFilename)
        _trlmsg = _trlmsg + '       astrodrizzle will not be run at this time.\n'
        print(_trlmsg)

        # Write message out to temp file and append it to full trailer file
        ftmp = open(_tmptrl, 'w')
        ftmp.writelines(_trlmsg)
        ftmp.close()
        _appendTrlFile(_trlfile, _tmptrl)

    _fmsg = None
    # Append final timestamp to trailer file...
    _final_msg = '%s: Finished processing %s \n' % (time_str, inFilename)
    _final_msg += _timestamp('astrodrizzle completed ')
    _trlmsg += _final_msg
    ftmp = open(_tmptrl, 'w')
    ftmp.writelines(_trlmsg)
    ftmp.close()
    _appendTrlFile(_trlfile, _tmptrl)

    # If we created a new ASN table, we need to remove it
    if _new_asn is not None:
        for _name in _new_asn:
            fileutil.removeFile(_name)

    # Clean up any generated OrIg_files directory
    if os.path.exists("OrIg_files"):
        # check to see whether this directory is empty
        flist = glob.glob('OrIg_files/*.fits')
        if len(flist) == 0:
            os.rmdir("OrIg_files")
        else:
            print(
                'OrIg_files directory NOT removed as it still contained images...'
            )
    if headerlets:
        # Generate headerlets for each updated FLT image
        hlet_msg = _timestamp("Writing Headerlets started")
        for fname in _calfiles:
            frootname = fileutil.buildNewRootname(fname)
            hname = "%s_flt_hlet.fits" % frootname
            hlet_msg += "Created Headerlet file %s \n" % hname
            try:
                headerlet.write_headerlet(
                    fname,
                    'OPUS',
                    output='flt',
                    wcskey='PRIMARY',
                    author="OPUS",
                    descrip="Default WCS from Pipeline Calibration",
                    attach=False,
                    clobber=True,
                    logging=False)
            except ValueError:
                hlet_msg += _timestamp(
                    "SKIPPED: Headerlet not created for %s \n" % fname)
                # update trailer file to log creation of headerlet files
        hlet_msg += _timestamp("Writing Headerlets completed")
        ftrl = open(_trlfile, 'a')
        ftrl.write(hlet_msg)
        ftrl.close()

    # If processing was done in a temp working dir, restore results to original
    # processing directory, return to original working dir and remove temp dir
    if newpath:
        _restoreResults(new_processing_dir, orig_processing_dir)
        os.chdir(orig_processing_dir)
        _removeWorkingDir(new_processing_dir)

    # Provide feedback to user
    print(_final_msg)
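
A hedged sketch of a pipeline-style call to process(); the ASN filename is a hypothetical example, and force=True bypasses the DRIZCORR check described above.

# Hypothetical usage sketch: reprocess an association regardless of its
# DRIZCORR switch, keeping intermediate products in memory.
process('j8cw03010_asn.fits', force=True, inmemory=True, num_cores=1,
        headerlets=False)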
Example #6
def buildMaskImage(rootname, bitvalue, output, extname='DQ', extver=1):
    """ Builds mask image from rootname's DQ array
        If there is no valid 'DQ' array in image, then return
        an empty string.
    """

    # If no bitvalue is set or rootname given, assume no mask is desired
    # However, this name would be useful as the output mask from
    # other processing, such as MultiDrizzle, so return it anyway.
    #if bitvalue == None or rootname == None:
    #    return None

    # build output name
    maskname = output

    # If an old version of the maskfile was present, remove it and rebuild it.
    if fileutil.findFile(maskname):
        fileutil.removeFile(maskname)

    # Open input file with DQ array
    fdq = fileutil.openImage(rootname, mode='readonly', memmap=False)
    try:
        _extn = fileutil.findExtname(fdq, extname, extver=extver)
        if _extn is not None:
            # Read in DQ array
            dqarr = fdq[_extn].data
        else:
            dqarr = None

        # For the case where there is no DQ array,
        # create a mask image of all ones.
        if dqarr is None:
            # We need to get the dimensions of the output DQ array
            # Since the DQ array is non-existent, look for the SCI extension
            _sci_extn = fileutil.findExtname(fdq, 'SCI', extver=extver)
            if _sci_extn is not None:
                _shape = fdq[_sci_extn].data.shape
                dqarr = np.zeros(_shape, dtype=np.uint16)
            else:
                raise Exception
        # Build mask array from DQ array
        maskarr = buildMask(dqarr, bitvalue)
        #Write out the mask file as simple FITS file
        fmask = fits.open(maskname, mode='append', memmap=False)
        maskhdu = fits.PrimaryHDU(data=maskarr)
        fmask.append(maskhdu)

        #Close files
        fmask.close()
        del fmask
        fdq.close()
        del fdq

    except Exception:
        fdq.close()
        del fdq
        # Safeguard against leaving behind an incomplete file
        if fileutil.findFile(maskname):
            os.remove(maskname)
        _errstr = "\nWarning: Problem creating MASK file for " + rootname + ".\n"
        #raise IOError, _errstr
        print(_errstr)
        return None

    # Return the name of the mask image written out
    return maskname

def buildShadowMaskImage(dqfile, detnum, extnum, maskname, bitvalue=None, binned=1):
    """ Builds mask image from WFPC2 shadow calibrations.
      detnum - string value of the 'DETECTOR' keyword
    """
    # ensure detnum is a string
    if not isinstance(detnum, str):
        detnum = repr(detnum)

    _funcroot = '_func_Shadow_WF'

    # build template shadow mask's filename

    # If an old version of the maskfile was present, remove it and rebuild it.
    if fileutil.findFile(maskname):
        fileutil.removeFile(maskname)

    # Check for existence of input .c1h file for use in making inmask file
    _use_inmask = not fileutil.findFile(dqfile) or bitvalue is None
    if _use_inmask:
        #_mask = 'wfpc2_inmask'+detnum+'.fits'
        _mask = maskname
        # Check to see if file exists...
        if not fileutil.findFile(_mask):
            # If not, create the file.
            # This takes a long time to run, so it should be done
            # only when absolutely necessary...
            try:
                _funcx = _funcroot + detnum + 'x'
                _funcy = _funcroot + detnum + 'y'

                _xarr = np.clip(np.fromfunction(eval(_funcx), (800, 800)),
                                0.0, 1.0).astype(np.uint8)
                _yarr = np.clip(np.fromfunction(eval(_funcy), (800, 800)),
                                0.0, 1.0).astype(np.uint8)
                maskarr = _xarr * _yarr

                if binned != 1:
                    bmaskarr = maskarr[::2, ::2]
                    bmaskarr *= maskarr[1::2, ::2]
                    bmaskarr *= maskarr[::2, 1::2]
                    bmaskarr *= maskarr[1::2, 1::2]
                    maskarr = bmaskarr.copy()
                    del bmaskarr

                #Write out the mask file as simple FITS file
                fmask = fits.open(_mask, mode='append')
                maskhdu = fits.PrimaryHDU(data=maskarr)
                fmask.append(maskhdu)

                #Close files
                fmask.close()
                del fmask
            except Exception:
                return None

    else:
        #
        # Build full mask based on .c1h and shadow mask
        #
        fdq = fileutil.openImage(dqfile)
        #fsmask = fits.open(_mask,memmap=1,mode='readonly')
        try:
            # Read in DQ array from .c1h and from shadow mask files
            dqarr = fdq[int(extnum)].data
            #maskarr = fsmask[0].data

            # Build mask array from DQ array
            dqmaskarr = buildMask(dqarr, bitvalue)

            #Write out the mask file as simple FITS file
            fdqmask = fits.open(maskname, mode='append')
            maskhdu = fits.PrimaryHDU(data=dqmaskarr)
            fdqmask.append(maskhdu)

            #Close files
            fdqmask.close()
            del fdqmask
            fdq.close()
            del fdq

        except Exception:
            fdq.close()
            del fdq
            # Safeguard against leaving behind an incomplete file
            if fileutil.findFile(maskname):
                os.remove(maskname)
            _errstr = "\nWarning: Problem creating DQMASK file for "+rootname+".\n"
            #raise IOError, _errstr
            print(_errstr)
            return None


    # Return the name of the mask image written out
    return maskname
Example #8
def writeSingleFITS(data, wcs, output, template, clobber=True, verbose=True):
    """ Write out a simple FITS file given a numpy array and the name of another
    FITS file to use as a template for the output image header.
    """
    outname, outextn = fileutil.parseFilename(output)
    outextname, outextver = fileutil.parseExtn(outextn)

    if fileutil.findFile(outname):
        if clobber:
            log.info('Deleting previous output product: %s' % outname)
            fileutil.removeFile(outname)

        else:
            log.warning('Output file %s already exists and overwrite not '
                        'specified!' % outname)
            log.error('Quitting... Please remove before resuming operations.')
            raise IOError

    # Now update WCS keywords with values from provided WCS
    if hasattr(wcs.sip, 'a_order'):
        siphdr = True
    else:
        siphdr = False
    wcshdr = wcs.wcs2header(sip2hdr=siphdr)

    if template is not None:
        # Get default headers from multi-extension FITS file
        # If input data is not in MEF FITS format, it will return 'None'
        # NOTE: These are HEADER objects, not HDUs
        (prihdr, scihdr, errhdr, dqhdr), newtab = getTemplates(template, EXTLIST)

        if scihdr is None:
            scihdr = fits.Header()
            indx = 0
            for c in prihdr.cards:
                if c.keyword not in ['INHERIT', 'EXPNAME']: indx += 1
                else: break
            for i in range(indx, len(prihdr)):
                scihdr.append(prihdr.cards[i])
            for i in range(indx, len(prihdr)):
                del prihdr[indx]
    else:
        scihdr = fits.Header()
        prihdr = fits.Header()
        # Start by updating PRIMARY header keywords...
        prihdr.set('EXTEND', value=True, after='NAXIS')
        prihdr['FILENAME'] = outname

    if outextname == '':
        outextname = 'sci'
    if outextver == 0: outextver = 1
    scihdr['EXTNAME'] = outextname.upper()
    scihdr['EXTVER'] = outextver

    for card in wcshdr.cards:
        scihdr[card.keyword] = (card.value, card.comment)

    # Create PyFITS HDUList for all extensions
    outhdu = fits.HDUList()
    # Setup primary header as an HDU ready for appending to output FITS file
    prihdu = fits.PrimaryHDU(header=prihdr)
    scihdu = fits.ImageHDU(header=scihdr, data=data)

    outhdu.append(prihdu)
    outhdu.append(scihdu)
    outhdu.writeto(outname)

    if verbose:
        print('Created output image: %s' % outname)
Example #9
def buildShadowMaskImage(dqfile,
                         detnum,
                         extnum,
                         maskname,
                         bitvalue=None,
                         binned=1):
    """ Builds mask image from WFPC2 shadow calibrations.
      detnum - string value of the 'DETECTOR' keyword
    """
    # ensure detnum is a string
    if not isinstance(detnum, str):
        detnum = repr(detnum)

    _funcroot = '_func_Shadow_WF'

    # build template shadow mask's filename

    # If an old version of the maskfile was present, remove it and rebuild it.
    if fileutil.findFile(maskname):
        fileutil.removeFile(maskname)

    _use_inmask = not fileutil.findFile(dqfile) or bitvalue is None

    # Check for existence of input .c1h file for use in making inmask file
    if _use_inmask:
        #_mask = 'wfpc2_inmask'+detnum+'.fits'
        _mask = maskname
        # Check to see if file exists...
        if not fileutil.findFile(_mask):
            # If not, create the file.
            # This takes a long time to run, so it should be done
            # only when absolutely necessary...
            try:
                _funcx = _funcroot + detnum + 'x'
                _funcy = _funcroot + detnum + 'y'

                _xarr = np.clip(np.fromfunction(eval(_funcx), (800, 800)), 0.0,
                                1.0).astype(np.uint8)
                _yarr = np.clip(np.fromfunction(eval(_funcy), (800, 800)), 0.0,
                                1.0).astype(np.uint8)
                maskarr = _xarr * _yarr

                if binned != 1:
                    bmaskarr = maskarr[::2, ::2]
                    bmaskarr *= maskarr[1::2, ::2]
                    bmaskarr *= maskarr[::2, 1::2]
                    bmaskarr *= maskarr[1::2, 1::2]
                    maskarr = bmaskarr.copy()
                    del bmaskarr

                #Write out the mask file as simple FITS file
                fmask = fits.open(_mask, mode='append', memmap=False)
                maskhdu = fits.PrimaryHDU(data=maskarr)
                fmask.append(maskhdu)

                #Close files
                fmask.close()
                del fmask
            except Exception:
                return None

    else:
        #
        # Build full mask based on .c1h and shadow mask
        #
        fdq = fileutil.openImage(dqfile, mode='readonly', memmap=False)
        try:
            # Read in DQ array from .c1h and from shadow mask files
            dqarr = fdq[int(extnum)].data
            #maskarr = fsmask[0].data

            # Build mask array from DQ array
            dqmaskarr = buildMask(dqarr, bitvalue)

            #Write out the mask file as simple FITS file
            fdqmask = fits.open(maskname, mode='append', memmap=False)
            maskhdu = fits.PrimaryHDU(data=dqmaskarr)
            fdqmask.append(maskhdu)

            #Close files
            fdqmask.close()
            del fdqmask
            fdq.close()
            del fdq

        except Exception:
            fdq.close()
            del fdq
            # Safeguard against leaving behind an incomplete file
            if fileutil.findFile(maskname):
                os.remove(maskname)
            _errstr = "\nWarning: Problem creating DQMASK file for " + rootname + ".\n"
            #raise IOError, _errstr
            print(_errstr)
            return None

    # Return the name of the mask image written out
    return maskname
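
A hedged usage sketch for buildShadowMaskImage(); the filenames, chip number, and bit value are hypothetical examples.

# Hypothetical usage sketch: build the chip-2 mask by combining the WFPC2
# shadow model with DQ bits from a _c1f data-quality file; all values
# are illustrative.
mask = buildShadowMaskImage('u40x0102m_c1f.fits', '2', 2,
                            'wfpc2_inmask2.fits', bitvalue=8)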
Example #10
    def writeFITS(self, template, sciarr, whtarr, ctxarr=None,
                versions=None, overwrite=yes, blend=True, virtual=False):
        """
        Generate PyFITS objects for each output extension
        using the file given by 'template' for populating
        headers.

        The arrays will have the size specified by 'shape'.
        """
        if not isinstance(template, list):
            template = [template]

        if fileutil.findFile(self.output):
            if overwrite:
                log.info('Deleting previous output product: %s' % self.output)
                fileutil.removeFile(self.output)

            else:
                log.warning('Output file %s already exists and overwrite not '
                            'specified!' % self.output)
                log.error('Quitting... Please remove before resuming '
                          'operations.')
                raise IOError

        # initialize output value for this method
        outputFITS = {}
        # Default value for NEXTEND when 'build'== True
        nextend = 3
        if not self.build:
            nextend = 0
            if self.outweight:
                if overwrite:
                    if fileutil.findFile(self.outweight):
                        log.info('Deleting previous output WHT product: %s' %
                                 self.outweight)
                    fileutil.removeFile(self.outweight)
                else:
                    log.warning('Output file %s already exists and overwrite '
                                'not specified!' % self.outweight)
                    log.error('Quitting... Please remove before resuming '
                              'operations.')
                    raise IOError


            if self.outcontext:
                if overwrite:
                    if fileutil.findFile(self.outcontext):
                        log.info('Deleting previous output CTX product: %s' %
                                 self.outcontext)
                    fileutil.removeFile(self.outcontext)
                else:
                    log.warning('Output file %s already exists and overwrite '
                                'not specified!' % self.outcontext)
                    log.error('Quitting... Please remove before resuming '
                              'operations.')
                    raise IOError


        # Get default headers from multi-extension FITS file
        # If only writing out single drizzle product, blending needs to be
        # forced off as there is only 1 input to report, no blending needed
        if self.single:
            blend = False

        # If input data is not in MEF FITS format, it will return 'None'
        # and those headers will have to be generated from drizzle output
        # file FITS headers.
        # NOTE: These are HEADER objects, not HDUs
        #prihdr,scihdr,errhdr,dqhdr = getTemplates(template)
        self.fullhdrs, intab = getTemplates(template, blend=False)

        newhdrs, newtab = getTemplates(template,blend=blend)
        if newtab is not None: nextend += 1 # account for new table extn

        prihdr = newhdrs[0]
        scihdr = newhdrs[1]
        errhdr = newhdrs[2]
        dqhdr = newhdrs[3]

        # Setup primary header as an HDU ready for appending to output FITS file
        prihdu = fits.PrimaryHDU(header=prihdr, data=None)

        # Start by updating PRIMARY header keywords...
        prihdu.header.set('EXTEND', value=True, after='NAXIS')
        prihdu.header['NEXTEND'] = nextend
        prihdu.header['FILENAME'] = self.output
        prihdu.header['PROD_VER'] = 'DrizzlePac {}'.format(version.__version__)

        # Update the ROOTNAME with the new value as well
        _indx = self.output.find('_drz')
        if _indx < 0:
            rootname_val = self.output
        else:
            rootname_val = self.output[:_indx]
        prihdu.header['ROOTNAME'] = rootname_val


        # Get the total exposure time for the image
        # If not calculated by PyDrizzle and passed through
        # the pardict, then leave value from the template image.
        if self.texptime:
            prihdu.header['EXPTIME'] = self.texptime
            prihdu.header.set('TEXPTIME', value=self.texptime, after='EXPTIME')
            prihdu.header['EXPSTART'] = self.expstart
            prihdu.header['EXPEND'] = self.expend

        #Update ASN_MTYPE to reflect the fact that this is a product
        # Currently hard-wired to always output 'PROD-DTH' as MTYPE
        prihdu.header['ASN_MTYP'] = 'PROD-DTH'

        # Update DITHCORR calibration keyword if present
        # Remove when we can modify FITS headers in place...
        if 'DRIZCORR' in prihdu.header:
            prihdu.header['DRIZCORR'] = 'COMPLETE'
        if 'DITHCORR' in prihdu.header:
            prihdu.header['DITHCORR'] = 'COMPLETE'

        prihdu.header['NDRIZIM'] = (len(self.parlist),
                                    'Drizzle, No. images drizzled onto output')

        # Only a subset of these keywords makes sense for the new WCS based
        # transformations. They need to be reviewed to decide what to keep
        # and what to leave out.
        if not self.blot:
            self.addDrizKeywords(prihdu.header,versions)

        if scihdr:
            scihdr.pop('OBJECT', None)

            if 'CCDCHIP' in scihdr: scihdr['CCDCHIP'] = '-999'
            if 'NCOMBINE' in scihdr:
                scihdr['NCOMBINE'] = self.parlist[0]['nimages']

            # If BUNIT keyword was found and reset, then
            bunit_last_kw = self.find_kwupdate_location(scihdr,'bunit')
            if self.bunit is not None:
                comment_str = "Units of science product"
                if self.bunit.lower()[:5] == 'count':
                    comment_str = "counts * gain = electrons"
                scihdr.set('BUNIT', value=self.bunit,
                           comment=comment_str,
                           after=bunit_last_kw)
            else:
                # check to see whether to update already present BUNIT comment
                if 'bunit' in scihdr and scihdr['bunit'].lower()[:5] == 'count':
                    comment_str = "counts * gain = electrons"
                    scihdr.set('BUNIT', value=scihdr['bunit'],
                               comment=comment_str,
                               after=bunit_last_kw)

            # Add WCS keywords to SCI header
            if self.wcs:
                pre_wcs_kw = self.find_kwupdate_location(scihdr,'CD1_1')
                addWCSKeywords(self.wcs,scihdr,blot=self.blot,
                                single=self.single, after=pre_wcs_kw)
                # Recompute this after removing distortion kws
                pre_wcs_kw = self.find_kwupdate_location(scihdr,'CD1_1')

        ##########
        # Now, build the output file
        ##########
        if self.build:
            print('-Generating multi-extension output file: ',self.output)
            fo = fits.HDUList()

            # Add primary header to output file...
            fo.append(prihdu)

            if self.single and self.compress:
                hdu = fits.CompImageHDU(data=sciarr, header=scihdr, name=EXTLIST[0])
            else:
                hdu = fits.ImageHDU(data=sciarr, header=scihdr, name=EXTLIST[0])
            last_kw = self.find_kwupdate_location(scihdr,'EXTNAME')
            hdu.header.set('EXTNAME', value='SCI', after=last_kw)
            hdu.header.set('EXTVER', value=1, after='EXTNAME')
            fo.append(hdu)

            # Build WHT extension here, if requested...
            if errhdr:
                errhdr['CCDCHIP'] = '-999'

            if self.single and self.compress:
                hdu = fits.CompImageHDU(data=whtarr, header=errhdr, name=EXTLIST[1])
            else:
                hdu = fits.ImageHDU(data=whtarr, header=errhdr, name=EXTLIST[1])
            last_kw = self.find_kwupdate_location(errhdr,'EXTNAME')
            hdu.header.set('EXTNAME', value='WHT', after=last_kw)
            hdu.header.set('EXTVER', value=1, after='EXTNAME')
            if self.wcs:
                pre_wcs_kw = self.find_kwupdate_location(hdu.header,'CD1_1')
                # Update WCS Keywords based on PyDrizzle product's value
                # since 'drizzle' itself doesn't update that keyword.
                addWCSKeywords(self.wcs,hdu.header,blot=self.blot,
                               single=self.single, after=pre_wcs_kw)
            fo.append(hdu)

            # Build CTX extension here
            # If there is only 1 plane, write it out as a 2-D extension
            if self.outcontext:
                if ctxarr.shape[0] == 1:
                    _ctxarr = ctxarr[0]
                else:
                    _ctxarr = ctxarr
            else:
                _ctxarr = None

            if self.single and self.compress:
                hdu = fits.CompImageHDU(data=_ctxarr, header=dqhdr, name=EXTLIST[2])
            else:
                hdu = fits.ImageHDU(data=_ctxarr, header=dqhdr, name=EXTLIST[2])
            last_kw = self.find_kwupdate_location(dqhdr,'EXTNAME')
            hdu.header.set('EXTNAME', value='CTX', after=last_kw)
            hdu.header.set('EXTVER', value=1, after='EXTNAME')

            if self.wcs:
                pre_wcs_kw = self.find_kwupdate_location(hdu.header,'CD1_1')
                # Update WCS Keywords based on PyDrizzle product's value
                # since 'drizzle' itself doesn't update that keyword.
                addWCSKeywords(self.wcs,hdu.header,blot=self.blot,
                               single=self.single, after=pre_wcs_kw)
            fo.append(hdu)

            # remove all alternate WCS solutions from headers of this product
            wcs_functions.removeAllAltWCS(fo,[1])

            # add table of combined header keyword values to FITS file
            if newtab is not None:
                fo.append(newtab)

            if not virtual:
                print('Writing out to disk:',self.output)
                # write out file to disk
                fo.writeto(self.output)
                fo.close()
                del fo, hdu
                fo = None
            # End 'if not virtual'
            outputFITS[self.output]= fo

        else:
            print('-Generating simple FITS output: %s' % self.outdata)

            fo = fits.HDUList()
            hdu_header = prihdu.header.copy()
            del hdu_header['nextend']

            # Append remaining unique header keywords from the template SCI
            # header to the Primary header...
            if scihdr:
                for _card in scihdr.cards:
                    if _card.keyword not in RESERVED_KEYS and _card.keyword not in hdu_header:
                        hdu_header.append(_card)
            # Strip FITS structural keywords inherited from the extension header
            for kw in ['PCOUNT', 'GCOUNT']:
                try:
                    del hdu_header[kw]
                except KeyError:
                    pass
            hdu_header['filename'] = self.outdata

            if self.compress:
                hdu = fits.CompImageHDU(data=sciarr, header=hdu_header)
                wcs_ext = [1]
            else:
                hdu = fits.ImageHDU(data=sciarr, header=hdu_header)
                wcs_ext = [0]

            # explicitly set EXTEND to FALSE for simple FITS files.
            dim = len(sciarr.shape)
            hdu.header.set('extend', value=False, after='NAXIS%s'%dim)

            # Add primary header to output file...
            fo.append(hdu)

            # remove all alternate WCS solutions from headers of this product
            logutil.logging.disable(logutil.logging.INFO)
            wcs_functions.removeAllAltWCS(fo,wcs_ext)
            logutil.logging.disable(logutil.logging.NOTSET)

            # add table of combined header keyword values to FITS file
            if newtab is not None:
                fo.append(newtab)

            if not virtual:
                print('Writing out image to disk:',self.outdata)
                # write out file to disk
                fo.writeto(self.outdata)
                del fo,hdu
                fo = None
            # End 'if not virtual'
            outputFITS[self.outdata]= fo

            if self.outweight and whtarr is not None:
                # We need to build new PyFITS objects for each WHT array
                fwht = fits.HDUList()

                if errhdr:
                    errhdr['CCDCHIP'] = '-999'

                if self.compress:
                    hdu = fits.CompImageHDU(data=whtarr, header=prihdu.header)
                else:
                    hdu = fits.ImageHDU(data=whtarr, header=prihdu.header)
                # Append remaining unique header keywords from the template ERR
                # header to the Primary header...
                if errhdr:
                    for _card in errhdr.cards:
                        if _card.keyword not in RESERVED_KEYS and _card.keyword not in hdu.header:
                            hdu.header.append(_card)
                hdu.header['filename'] = self.outweight
                hdu.header['CCDCHIP'] = '-999'
                if self.wcs:
                    pre_wcs_kw = self.find_kwupdate_location(hdu.header,'CD1_1')
                    # Update WCS Keywords based on PyDrizzle product's value
                    # since 'drizzle' itself doesn't update that keyword.
                    addWCSKeywords(self.wcs,hdu.header, blot=self.blot,
                                   single=self.single, after=pre_wcs_kw)

                # Add primary header to output file...
                fwht.append(hdu)
                # remove all alternate WCS solutions from headers of this product
                wcs_functions.removeAllAltWCS(fwht,wcs_ext)

                if not virtual:
                    print('Writing out image to disk:',self.outweight)
                    fwht.writeto(self.outweight)
                    del fwht,hdu
                    fwht = None
                # End 'if not virtual'
                outputFITS[self.outweight]= fwht

            # If a context image was specified, build a PyFITS object
            # for it as well...
            if self.outcontext and ctxarr is not None:
                fctx = fits.HDUList()

                # If there is only 1 plane, write it out as a 2-D extension
                if ctxarr.shape[0] == 1:
                    _ctxarr = ctxarr[0]
                else:
                    _ctxarr = ctxarr

                if self.compress:
                    hdu = fits.CompImageHDU(data=_ctxarr, header=prihdu.header)
                else:
                    hdu = fits.ImageHDU(data=_ctxarr, header=prihdu.header)
                # Append remaining unique header keywords from template DQ
                # header to Primary header...
                if dqhdr:
                    for _card in dqhdr.cards:
                        if ( (_card.keyword not in RESERVED_KEYS) and
                             _card.keyword not in hdu.header):
                            hdu.header.append(_card)
                hdu.header['filename'] = self.outcontext
                if self.wcs:
                    pre_wcs_kw = self.find_kwupdate_location(hdu.header,'CD1_1')
                    # Update WCS Keywords based on PyDrizzle product's value
                    # since 'drizzle' itself doesn't update that keyword.
                    addWCSKeywords(self.wcs,hdu.header, blot=self.blot,
                                   single=self.single, after=pre_wcs_kw)

                fctx.append(hdu)
                # remove all alternate WCS solutions from headers of this product
                wcs_functions.removeAllAltWCS(fctx,wcs_ext)
                if not virtual:
                    print('Writing out image to disk:',self.outcontext)
                    fctx.writeto(self.outcontext)
                    del fctx,hdu
                    fctx = None
                # End 'if not virtual'

                outputFITS[self.outcontext]= fctx

        return outputFITS
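# A minimal usage sketch for the writeFITS() method above. Everything here is
# an assumption for illustration: 'outimg' stands for whatever drizzle output
# object exposes writeFITS(), and the template filename, array shapes, and
# versions dict are hypothetical, not taken from the pipeline itself.
#
#   import numpy as np
#
#   sci = np.zeros((2048, 2048), dtype=np.float32)   # drizzled science array
#   wht = np.ones_like(sci)                          # weight array
#   ctx = np.zeros((1, 2048, 2048), dtype=np.int32)  # single-plane context
#
#   out = outimg.writeFITS('j8c0d1011_flt.fits', sci, wht, ctxarr=ctx,
#                          versions={'PyDrizzle': 'x.y.z'},
#                          blend=True, virtual=False)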
Example #11
def process(inFile,force=False,newpath=None, inmemory=False, num_cores=None,
            headerlets=True):
    """ Run astrodrizzle on input file/ASN table
        using default values for astrodrizzle parameters.
    """
    # We only need to import this package if a user runs the task
    import drizzlepac
    from drizzlepac import processInput # used for creating new ASNs for _flc inputs
    import stwcs

    if headerlets:
        from stwcs.wcsutil import headerlet

    # Open the input file
    try:
        # Make sure given filename is complete and exists...
        inFilename = fileutil.buildRootname(inFile,ext=['.fits'])
        if not os.path.exists(inFilename):
            print("ERROR: Input file - %s - does not exist." % inFilename)
            return
    except TypeError:
        print("ERROR: Inappropriate input file.")
        return

    #If newpath was specified, move all files to that directory for processing
    if newpath:
        orig_processing_dir = os.getcwd()
        new_processing_dir = _createWorkingDir(newpath,inFilename)
        _copyToNewWorkingDir(new_processing_dir,inFilename)
        os.chdir(new_processing_dir)

    # Initialize for later use...
    _mname = None
    _new_asn = None
    _calfiles = []

    # Identify WFPC2 inputs to account for differences in WFPC2 inputs
    wfpc2_input = fits.getval(inFilename, 'instrume') == 'WFPC2'
    cal_ext = None

    # Check input file to see if [DRIZ/DITH]CORR is set to PERFORM
    if '_asn' in inFilename:
        # We are working with an ASN table.
        # Use asnutil code to extract filename
        inFilename = _lowerAsn(inFilename)
        _new_asn = [inFilename]
        _asndict = asnutil.readASNTable(inFilename,None,prodonly=False)
        _cal_prodname = _asndict['output'].lower()
        _fname = fileutil.buildRootname(_cal_prodname,ext=['_drz.fits'])

        # Retrieve the first member's rootname for possible use later
        _fimg = fits.open(inFilename, memmap=False)
        for name in _fimg[1].data.field('MEMNAME'):
            if name[-1] != '*':
                _mname = name.split('\0', 1)[0].lower()
                break
        _fimg.close()
        del _fimg

    else:
        # Check to see if input is a _RAW file
        # If it is, strip off the _raw.fits extension...
        _indx = inFilename.find('_raw')
        if _indx < 0: _indx = len(inFilename)
        # ... and build the CALXXX product rootname.
        if wfpc2_input:
            # force code to define _c0m file as calibrated product to be used
            cal_ext = ['_c0m.fits']
        _mname = fileutil.buildRootname(inFilename[:_indx], ext=cal_ext)

        _cal_prodname = inFilename[:_indx]
        # Reset inFilename to correspond to appropriate input for
        # drizzle: calibrated product name.
        inFilename = _mname

        if _mname is None:
            errorMsg = 'Could not find calibrated product!'
            raise Exception(errorMsg)

    # Create trailer filenames based on ASN output filename or
    # on input name for single exposures
    if '_raw' in inFile:
        # Output trailer file to RAW file's trailer
        _trlroot = inFile[:inFile.find('_raw')]
    elif '_asn' in inFile:
        # Output trailer file to ASN file's trailer, not product's trailer
        _trlroot = inFile[:inFile.find('_asn')]
    else:
        # Default: trim off the last suffix of the input filename
        # and replace it with '.tra'
        _indx = inFile.rfind('_')
        if _indx > 0:
            _trlroot = inFile[:_indx]
        else:
            _trlroot = inFile

    _trlfile = _trlroot + '.tra'

    # Open product and read keyword value
    # Check to see if product already exists...
    dkey = 'DRIZCORR'
    # ...if product does NOT exist, interrogate input file
    # to find out whether 'dcorr' has been set to PERFORM
    # Check if user wants to process again regardless of DRIZCORR keyword value
    if force:
        dcorr = 'PERFORM'
    else:
        if _mname :
            _fimg = fits.open(fileutil.buildRootname(_mname,ext=['_raw.fits']), memmap=False)
            _phdr = _fimg['PRIMARY'].header
            if dkey in _phdr:
                dcorr = _phdr[dkey]
            else:
                dcorr = None
            _fimg.close()
            del _fimg
        else:
            dcorr = None

    time_str = _getTime()
    _tmptrl = _trlroot + '_tmp.tra'
    _drizfile = _trlroot + '_pydriz'
    _drizlog = _drizfile + ".log" # the '.log' gets added automatically by astrodrizzle
    if dcorr == 'PERFORM':
        if '_asn.fits' not in inFilename:
            # Working with a singleton
            # However, we always want to make sure we always use
            # a calibrated product as input, if available.
            _infile = fileutil.buildRootname(_cal_prodname, ext=cal_ext)
            _infile_flc = fileutil.buildRootname(_cal_prodname,ext=['_flc.fits'])

            _cal_prodname = _infile
            _inlist = _calfiles = [_infile]

            # Add CTE corrected filename as additional input if present
            if os.path.exists(_infile_flc) and _infile_flc != _infile:
                _inlist.append(_infile_flc)

        else:
            # Working with an ASN table...
            _infile = inFilename
            flist,duplist = processInput.checkForDuplicateInputs(_asndict['order'])
            _calfiles = flist
            if len(duplist) > 0:
                origasn = processInput.changeSuffixinASN(inFilename,'flt')
                dupasn = processInput.changeSuffixinASN(inFilename,'flc')
                _inlist = [origasn,dupasn]
            else:
                _inlist = [_infile]
            # We want to keep the original specification of the calibration
            # product name, though, not a lower-case version...
            _cal_prodname = inFilename
            _new_asn.extend(_inlist) # kept so we can delete it when finished


        # Run astrodrizzle and send its processing statements to _trlfile
        _pyver = drizzlepac.astrodrizzle.__version__

        for _infile in _inlist: # Run astrodrizzle for all inputs
            # Create trailer marker message for start of astrodrizzle processing
            _trlmsg = _timestamp('astrodrizzle started ')
            _trlmsg += __trlmarker__
            _trlmsg += '%s: Processing %s with astrodrizzle Version %s\n' % (time_str,_infile,_pyver)
            print(_trlmsg)

            # Write out trailer comments to trailer file...
            ftmp = open(_tmptrl,'w')
            ftmp.writelines(_trlmsg)
            ftmp.close()
            _appendTrlFile(_trlfile,_tmptrl)

            _pyd_err = _trlroot+'_pydriz.stderr'

            try:
                b = drizzlepac.astrodrizzle.AstroDrizzle(input=_infile,runfile=_drizfile,
                                            configobj='defaults',in_memory=inmemory,
                                            num_cores=num_cores, **pipeline_pars)
            except Exception as errorobj:
                _appendTrlFile(_trlfile,_drizlog)
                _appendTrlFile(_trlfile,_pyd_err)
                _ftrl = open(_trlfile,'a')
                _ftrl.write('ERROR: Could not complete astrodrizzle processing of %s.\n' % _infile)
                _ftrl.write(str(sys.exc_info()[0])+': ')
                _ftrl.writelines(str(errorobj))
                _ftrl.write('\n')
                _ftrl.close()
                print('ERROR: Could not complete astrodrizzle processing of %s.' % _infile)
                raise Exception(str(errorobj))

            # Now, append comments created by PyDrizzle to CALXXX trailer file
            print('Updating trailer file %s with astrodrizzle comments.' % _trlfile)
            _appendTrlFile(_trlfile,_drizlog)

        # Save this for when astropy.io.fits can modify a file 'in-place'
        # Update calibration switch
        _fimg = fits.open(_cal_prodname, mode='update', memmap=False)
        _fimg['PRIMARY'].header[dkey] = 'COMPLETE'
        _fimg.close()
        del _fimg

        # Enforce pipeline convention of all lower-case product
        # names
        _prodlist = glob.glob('*drz.fits')
        for _prodname in _prodlist:
            _plower = _prodname.lower()
            if _prodname != _plower:  os.rename(_prodname,_plower)

    else:
        # Create default trailer file messages when astrodrizzle is not
        # run on a file.  This will typically apply only to BIAS,DARK
        # and other reference images.
        # Start by building up the message...
        _trlmsg = _timestamp('astrodrizzle skipped ')
        _trlmsg = _trlmsg + __trlmarker__
        _trlmsg = _trlmsg + '%s: astrodrizzle processing not requested for %s.\n' % (time_str,inFilename)
        _trlmsg = _trlmsg + '       astrodrizzle will not be run at this time.\n'
        print(_trlmsg)

        # Write message out to temp file and append it to full trailer file
        ftmp = open(_tmptrl,'w')
        ftmp.writelines(_trlmsg)
        ftmp.close()
        _appendTrlFile(_trlfile,_tmptrl)

    # Append final timestamp to trailer file...
    _final_msg = '%s: Finished processing %s \n' % (time_str,inFilename)
    _final_msg += _timestamp('astrodrizzle completed ')
    _trlmsg = _final_msg
    ftmp = open(_tmptrl,'w')
    ftmp.writelines(_trlmsg)
    ftmp.close()
    _appendTrlFile(_trlfile,_tmptrl)

    # If we created a new ASN table, we need to remove it
    if _new_asn is not None:
        for _name in _new_asn: fileutil.removeFile(_name)

    # Clean up any generated OrIg_files directory
    if os.path.exists("OrIg_files"):
        # check to see whether this directory is empty
        flist = glob.glob('OrIg_files/*.fits')
        if len(flist) == 0:
            os.rmdir("OrIg_files")
        else:
            print('OrIg_files directory NOT removed as it still contained images...')
    if headerlets:
        # Generate headerlets for each updated FLT image
        hlet_msg = _timestamp("Writing Headerlets started")
        for fname in _calfiles:
            frootname = fileutil.buildNewRootname(fname)
            hname = "%s_flt_hlet.fits"%frootname
            hlet_msg += "Created Headerlet file %s \n"%hname
            try:
                headerlet.write_headerlet(fname,'OPUS',output='flt', wcskey='PRIMARY',
                    author="OPUS",descrip="Default WCS from Pipeline Calibration",
                    attach=False,clobber=True,logging=False)
            except ValueError:
                hlet_msg += _timestamp("SKIPPED: Headerlet not created for %s \n"%fname)
                # update trailer file to log creation of headerlet files
        hlet_msg += _timestamp("Writing Headerlets completed")
        ftrl = open(_trlfile,'a')
        ftrl.write(hlet_msg)
        ftrl.close()

    # If processing was done in a temp working dir, restore results to original
    # processing directory, return to original working dir and remove temp dir
    if newpath:
        _restoreResults(new_processing_dir,orig_processing_dir)
        os.chdir(orig_processing_dir)
        _removeWorkingDir(new_processing_dir)

    # Provide feedback to user
    print(_final_msg)
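# A minimal usage sketch for process() above; the ASN filename is hypothetical
# and only illustrates the expected *_asn.fits / *_raw.fits style of input.
#
#   process('j8c0d1011_asn.fits', force=True, num_cores=4, headerlets=True)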
Example #12
def buildShadowMaskImage(rootname,detnum,extnum,maskname,replace=yes,bitvalue=None,binned=1):
    """ Builds mask image from WFPC2 shadow calibrations.
      detnum - value of the 'DETECTOR' keyword identifying the chip
    """
    # ensure detnum is a string
    if not isinstance(detnum, str):
        detnum = repr(detnum)

    _funcroot = '_func_Shadow_WF'

    # build template shadow mask's filename
    _mask = 'wfpc2_inmask'+detnum+'.fits'

    """
    if rootname != None:
        maskname = buildMaskName(fileutil.buildNewRootname(rootname),detnum)
    else:
        maskname = None
    """
    # If an old version of the maskfile was present, remove it and rebuild it.
    if fileutil.findFile(maskname) and replace:
        fileutil.removeFile(maskname)

    # Read in info from .c1h file to add flagged bad pixels to final mask
    _indx = rootname.find('.c1h')
    if _indx < 0: _indx = len(rootname)
    if rootname.find('.fits') < 0:
        _dqname = rootname[:_indx]+'.c1h'
    else:
        _dqname = rootname

    _use_inmask = False
    if fileutil.findFile(_dqname) != yes or bitvalue is None:
        _use_inmask = True

    # Check to see if the template shadow mask file already exists...
    if _use_inmask and not fileutil.findFile(_mask):
        # If not, create the file.
        # This takes a long time to run, so it should be done
        # only when absolutely necessary...
        try:
            _funcx = _funcroot+detnum+'x'
            _funcy = _funcroot+detnum+'y'

            _xarr = np.clip(np.fromfunction(eval(_funcx),(800,800)),0.0,1.0).astype(np.uint8)
            _yarr = np.clip(np.fromfunction(eval(_funcy),(800,800)),0.0,1.0).astype(np.uint8)
            maskarr = _xarr * _yarr

            if binned != 1:
                # For binned data, combine each 2x2 block of unbinned mask
                # pixels with logical AND so a binned pixel stays good only
                # when all four contributing pixels were good.
                print('in buildmask', binned)
                bmaskarr = maskarr[::2,::2]
                bmaskarr *= maskarr[1::2,::2]
                bmaskarr *= maskarr[::2,1::2]
                bmaskarr *= maskarr[1::2,1::2]
                maskarr = bmaskarr.copy()
                del bmaskarr

            #Write out the mask file as simple FITS file
            fmask = pyfits.open(_mask,'append')
            maskhdu = pyfits.PrimaryHDU(data=maskarr)
            fmask.append(maskhdu)

            #Close files
            fmask.close()
            del fmask
        except:
            # Any failure while generating the shadow mask is treated as
            # fatal for this routine; signal the caller by returning None.
            return None


    # Check for existence of input .c1h file for use in making the inmask file
    if fileutil.findFile(_dqname) != yes:
        print('DQ file ',_dqname,' NOT found...')
        print('Copying ',_mask,'to ',maskname,' as input mask file.')
        # Now, copy template mask file to output file, if necessary
        fileutil.copyFile(_mask,maskname,replace=yes)
    elif bitvalue is None:
        # If bitvalue was not set, then do not use anything but shadow mask
        fileutil.copyFile(_mask,maskname,replace=yes)
    else:
        #
        # Build full mask based on .c1h and shadow mask
        #
        fdq = fileutil.openImage(_dqname)
        #fsmask = pyfits.open(_mask,memmap=1,mode='readonly')
        try:
            # Read in DQ array from .c1h and from shadow mask files
            dqarr = fdq[int(extnum)].data
            #maskarr = fsmask[0].data

            # Build mask array from DQ array
            dqmaskarr = buildMask(dqarr,bitvalue)

            #Write out the mask file as simple FITS file
            fdqmask = pyfits.open(maskname,'append')
            maskhdu = pyfits.PrimaryHDU(data=dqmaskarr)
            fdqmask.append(maskhdu)

            #Close files
            fdqmask.close()
            del fdqmask
            fdq.close()
            del fdq

        except:
            fdq.close()
            del fdq
            # Safeguard against leaving behind an incomplete file
            if fileutil.findFile(maskname):
                os.remove(maskname)
            _errstr = "\nWarning: Problem creating DQMASK file for "+rootname+".\n"
            # raise IOError(_errstr)
            print(_errstr)
            return None


    # Return the name of the mask image written out
    return maskname
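# A minimal usage sketch for buildShadowMaskImage(); the rootname, mask name,
# and bitvalue below are hypothetical and only illustrate the calling pattern.
#
#   mask = buildShadowMaskImage('u40x0102m.c1h', '2', 2,
#                               'u40x0102m_inmask2.fits', bitvalue=8)
#   if mask is None:
#       print('Shadow/DQ mask could not be created')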
Example #13
def process(inFile,force=False,newpath=None, inmemory=False, num_cores=None,
            headerlets=True, align_to_gaia=True):
    """ Run astrodrizzle on input file/ASN table
        using default values for astrodrizzle parameters.
    """
    # We only need to import this package if a user runs the task
    import drizzlepac
    from drizzlepac import processInput # used for creating new ASNs for _flc inputs
    from stwcs import updatewcs
    from drizzlepac import alignimages
    
    # Interpret environment-variable overrides, if specified
    if envvar_compute_name in os.environ:
        val = os.environ[envvar_compute_name].lower()
        if val not in envvar_bool_dict:
            msg = "ERROR: invalid value for {}.".format(envvar_compute_name)
            msg += "  \n    Valid Values: on, off, yes, no, true, false"
            raise ValueError(msg)            
        align_to_gaia = envvar_bool_dict[val]

    if envvar_new_apriori_name in os.environ:
        # Reset ASTROMETRY_STEP_CONTROL based on this variable
        # This provides backward-compatibility until ASTROMETRY_STEP_CONTROL
        # gets removed entirely.
        val = os.environ[envvar_new_apriori_name].lower()
        if val not in envvar_dict:
            msg = "ERROR: invalid value for {}.".format(envvar_new_apriori_name)
            msg += "  \n    Valid Values: on, off, yes, no, true, false"
            raise ValueError(msg)

        os.environ[envvar_old_apriori_name] = envvar_dict[val]
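
    # The module-level names referenced above (envvar_compute_name,
    # envvar_new_apriori_name, envvar_old_apriori_name, envvar_bool_dict,
    # envvar_dict) are defined elsewhere in this module. A plausible
    # reconstruction -- an assumption based on the error messages above and
    # the ASTROMETRY_STEP_CONTROL comment, not verified source -- would be:
    #
    #   envvar_bool_dict = {'on': True, 'yes': True, 'true': True,
    #                       'off': False, 'no': False, 'false': False}
    #   envvar_dict = {'on': 'on', 'yes': 'on', 'true': 'on',
    #                  'off': 'off', 'no': 'off', 'false': 'off'}
    #   envvar_old_apriori_name = 'ASTROMETRY_STEP_CONTROL'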

    if headerlets or align_to_gaia:
        from stwcs.wcsutil import headerlet

    # Open the input file
    try:
        # Make sure given filename is complete and exists...
        inFilename = fileutil.buildRootname(inFile,ext=['.fits'])
        if not os.path.exists(inFilename):
            print("ERROR: Input file - %s - does not exist." % inFilename)
            return
    except TypeError:
        print("ERROR: Inappropriate input file.")
        return

    #If newpath was specified, move all files to that directory for processing
    if newpath:
        orig_processing_dir = os.getcwd()
        new_processing_dir = _createWorkingDir(newpath,inFilename)
        _copyToNewWorkingDir(new_processing_dir,inFilename)
        os.chdir(new_processing_dir)

    # Initialize for later use...
    _mname = None
    _new_asn = None
    _calfiles = []

    # Identify WFPC2 inputs to account for differences in WFPC2 inputs
    wfpc2_input = fits.getval(inFilename, 'instrume') == 'WFPC2'
    cal_ext = None

    # Check input file to see if [DRIZ/DITH]CORR is set to PERFORM
    if '_asn' in inFilename:
        # We are working with an ASN table.
        # Use asnutil code to extract filename
        inFilename = _lowerAsn(inFilename)
        _new_asn = [inFilename]
        _asndict = asnutil.readASNTable(inFilename,None,prodonly=False)
        _cal_prodname = _asndict['output'].lower()
        #_fname = fileutil.buildRootname(_cal_prodname,ext=['_drz.fits'])

        # Retrieve the first member's rootname for possible use later
        _fimg = fits.open(inFilename, memmap=False)
        for name in _fimg[1].data.field('MEMNAME'):
            if name[-1] != '*':
                _mname = name.split('\0', 1)[0].lower()
                break
        _fimg.close()
        del _fimg

    else:
        # Check to see if input is a _RAW file
        # If it is, strip off the _raw.fits extension...
        _indx = inFilename.find('_raw')
        if _indx < 0: _indx = len(inFilename)
        # ... and build the CALXXX product rootname.
        if wfpc2_input:
            # force code to define _c0m file as calibrated product to be used
            cal_ext = ['_c0m.fits']
        _mname = fileutil.buildRootname(inFilename[:_indx], ext=cal_ext)

        _cal_prodname = inFilename[:_indx]
        # Reset inFilename to correspond to appropriate input for
        # drizzle: calibrated product name.
        inFilename = _mname

        if _mname is None:
            errorMsg = 'Could not find calibrated product!'
            raise Exception(errorMsg)

    # Create trailer filenames based on ASN output filename or
    # on input name for single exposures
    if '_raw' in inFile:
        # Output trailer file to RAW file's trailer
        _trlroot = inFile[:inFile.find('_raw')]
    elif '_asn' in inFile:
        # Output trailer file to ASN file's trailer, not product's trailer
        _trlroot = inFile[:inFile.find('_asn')]
    else:
        # Default: trim off last suffix of input filename
        # and replacing with .tra
        _indx = inFile.rfind('_')
        if _indx > 0:
            _trlroot = inFile[:_indx]
        else:
            _trlroot = inFile

    _trlfile = _trlroot + '.tra'

    # Open product and read keyword value
    # Check to see if product already exists...
    dkey = 'DRIZCORR'
    # ...if product does NOT exist, interrogate input file
    # to find out whether 'dcorr' has been set to PERFORM
    # Check if user wants to process again regardless of DRIZCORR keyword value
    if force:
        dcorr = 'PERFORM'
    else:
        if _mname :
            _fimg = fits.open(fileutil.buildRootname(_mname,ext=['_raw.fits']), memmap=False)
            _phdr = _fimg['PRIMARY'].header
            if dkey in _phdr:
                dcorr = _phdr[dkey]
            else:
                dcorr = None
            _fimg.close()
            del _fimg
        else:
            dcorr = None

    time_str = _getTime()
    _tmptrl = _trlroot + '_tmp.tra'
    _drizfile = _trlroot + '_pydriz'
    _drizlog = _drizfile + ".log" # the '.log' gets added automatically by astrodrizzle
    _alignlog = _trlroot + '_align.log'
    if dcorr == 'PERFORM':
        if '_asn.fits' not in inFilename:
            # Working with a singleton
            # However, we always want to make sure we always use
            # a calibrated product as input, if available.
            _infile = fileutil.buildRootname(_cal_prodname, ext=cal_ext)
            _infile_flc = fileutil.buildRootname(_cal_prodname,ext=['_flc.fits'])

            _cal_prodname = _infile
            _inlist = _calfiles = [_infile]

            # Add CTE corrected filename as additional input if present
            if os.path.exists(_infile_flc) and _infile_flc != _infile:
                _inlist.append(_infile_flc)

        else:
            # Working with an ASN table...
            _infile = inFilename
            flist,duplist = processInput.checkForDuplicateInputs(_asndict['order'])
            _calfiles = flist
            if len(duplist) > 0:
                origasn = processInput.changeSuffixinASN(inFilename,'flt')
                dupasn = processInput.changeSuffixinASN(inFilename,'flc')
                _inlist = [origasn,dupasn]
            else:
                _inlist = [_infile]
            # We want to keep the original specification of the calibration
            # product name, though, not a lower-case version...
            _cal_prodname = inFilename
            _new_asn.extend(_inlist) # kept so we can delete it when finished

        # check to see whether FLC files are also present, and need to be updated
        # generate list of FLC files
        align_files = None
        _calfiles_flc = [f.replace('_flt.fits','_flc.fits') for f in _calfiles]
        # Ensure these files exist; if not, blank them out.
        # Also pick out which files will be used for additional alignment to GAIA
        if not os.path.exists(_calfiles_flc[0]):
            _calfiles_flc = None
            align_files = _calfiles
            align_update_files = None
        else:
            align_files = _calfiles_flc
            align_update_files = _calfiles

        # Run updatewcs on each list of images
        updatewcs.updatewcs(_calfiles)
        if _calfiles_flc:
            updatewcs.updatewcs(_calfiles_flc)

        if align_to_gaia:
            # Perform additional alignment on the FLC files, if present
            ###############
            #
            # call hlapipeline code here on align_files list of files
            #
            ###############
            # Create trailer marker message for start of align_to_GAIA processing
            _trlmsg = _timestamp("Align_to_GAIA started ")
            print(_trlmsg)
            ftmp = open(_tmptrl,'w')
            ftmp.writelines(_trlmsg)
            ftmp.close()
            _appendTrlFile(_trlfile,_tmptrl)
            _trlmsg = ""

            # Initialize align_table so it is defined even if perform_align fails
            align_table = None
            try:
                align_table = alignimages.perform_align(align_files,update_hdr_wcs=True, runfile=_alignlog)
                for row in align_table:
                    if row['status'] == 0:
                        trlstr = "Successfully aligned {} to {} astrometric frame\n"
                        _trlmsg += trlstr.format(row['imageName'], row['catalog'])
                    else:
                        trlstr = "Could not align {} to absolute astrometric frame\n"
                        _trlmsg += trlstr.format(row['imageName'])

            except Exception:
                # Something went wrong with alignment to GAIA, so report this in
                # trailer file
                _trlmsg = "EXCEPTION encountered in alignimages...\n"
                _trlmsg += "   No correction to absolute astrometric frame applied!\n"

            # Write the perform_align log to the trailer file...(this will delete the _alignlog)
            _appendTrlFile(_trlfile,_alignlog)

            # Append messages from this calling routine post-perform_align
            ftmp = open(_tmptrl,'w')
            ftmp.writelines(_trlmsg)
            ftmp.close()
            _appendTrlFile(_trlfile,_tmptrl)
            _trlmsg = ""

            # Check to see whether there are any additional input files that
            # need to be aligned (namely, FLT images)
            if align_update_files and align_table:
                # Apply headerlets from alignment to FLT version of the files
                for fltfile, flcfile in zip(align_update_files, align_files):
                    row = align_table[align_table['imageName']==flcfile]
                    headerletFile = row['headerletFile'][0]
                    if headerletFile != "None":
                        headerlet.apply_headerlet_as_primary(fltfile, headerletFile,
                                                            attach=True, archive=True)
                        # append log file contents to _trlmsg for inclusion in trailer file
                        _trlstr = "Applying headerlet {} as Primary WCS to {}\n"
                        _trlmsg += _trlstr.format(headerletFile, fltfile)
                    else:
                        _trlmsg += "No absolute astrometric headerlet applied to {}\n".format(fltfile)

            # Finally, append any further messages associated with alignment from this calling routine
            _trlmsg += _timestamp('Align_to_GAIA completed ')
            print(_trlmsg)
            ftmp = open(_tmptrl,'w')
            ftmp.writelines(_trlmsg)
            ftmp.close()
            _appendTrlFile(_trlfile,_tmptrl)

        # Run astrodrizzle and send its processing statements to _trlfile
        _pyver = drizzlepac.astrodrizzle.__version__

        for _infile in _inlist: # Run astrodrizzle for all inputs
            # Create trailer marker message for start of astrodrizzle processing
            _trlmsg = _timestamp('astrodrizzle started ')
            _trlmsg += __trlmarker__
            _trlmsg += '%s: Processing %s with astrodrizzle Version %s\n' % (time_str,_infile,_pyver)
            print(_trlmsg)

            # Write out trailer comments to trailer file...
            ftmp = open(_tmptrl,'w')
            ftmp.writelines(_trlmsg)
            ftmp.close()
            _appendTrlFile(_trlfile,_tmptrl)

            _pyd_err = _trlroot+'_pydriz.stderr'

            try:
                b = drizzlepac.astrodrizzle.AstroDrizzle(input=_infile,runfile=_drizfile,
                                            configobj='defaults',in_memory=inmemory,
                                            num_cores=num_cores, **pipeline_pars)
            except Exception as errorobj:
                _appendTrlFile(_trlfile,_drizlog)
                _appendTrlFile(_trlfile,_pyd_err)
                _ftrl = open(_trlfile,'a')
                _ftrl.write('ERROR: Could not complete astrodrizzle processing of %s.\n' % _infile)
                _ftrl.write(str(sys.exc_info()[0])+': ')
                _ftrl.writelines(str(errorobj))
                _ftrl.write('\n')
                _ftrl.close()
                print('ERROR: Could not complete astrodrizzle processing of %s.' % _infile)
                raise Exception(str(errorobj))

            # Now, append comments created by PyDrizzle to CALXXX trailer file
            print('Updating trailer file %s with astrodrizzle comments.' % _trlfile)
            _appendTrlFile(_trlfile,_drizlog)

        # Save this for when astropy.io.fits can modify a file 'in-place'
        # Update calibration switch
        _fimg = fits.open(_cal_prodname, mode='update', memmap=False)
        _fimg['PRIMARY'].header[dkey] = 'COMPLETE'
        _fimg.close()
        del _fimg

        # Enforce pipeline convention of all lower-case product
        # names
        _prodlist = glob.glob('*drz.fits')
        for _prodname in _prodlist:
            _plower = _prodname.lower()
            if _prodname != _plower:  os.rename(_prodname,_plower)

    else:
        # Create default trailer file messages when astrodrizzle is not
        # run on a file.  This will typically apply only to BIAS,DARK
        # and other reference images.
        # Start by building up the message...
        _trlmsg = _timestamp('astrodrizzle skipped ')
        _trlmsg = _trlmsg + __trlmarker__
        _trlmsg = _trlmsg + '%s: astrodrizzle processing not requested for %s.\n' % (time_str,inFilename)
        _trlmsg = _trlmsg + '       astrodrizzle will not be run at this time.\n'
        print(_trlmsg)

        # Write message out to temp file and append it to full trailer file
        ftmp = open(_tmptrl,'w')
        ftmp.writelines(_trlmsg)
        ftmp.close()
        _appendTrlFile(_trlfile,_tmptrl)

    # Append final timestamp to trailer file...
    _final_msg = '%s: Finished processing %s \n' % (time_str,inFilename)
    _final_msg += _timestamp('astrodrizzle completed ')
    _trlmsg = _final_msg
    ftmp = open(_tmptrl,'w')
    ftmp.writelines(_trlmsg)
    ftmp.close()
    _appendTrlFile(_trlfile,_tmptrl)

    # If we created a new ASN table, we need to remove it
    if _new_asn is not None:
        for _name in _new_asn: fileutil.removeFile(_name)

    # Clean up any generated OrIg_files directory
    if os.path.exists("OrIg_files"):
        # check to see whether this directory is empty
        flist = glob.glob('OrIg_files/*.fits')
        if len(flist) == 0:
            os.rmdir("OrIg_files")
        else:
            print('OrIg_files directory NOT removed as it still contained images...')

    # If headerlets have already been written out by alignment code,
    # do NOT write out this version of the headerlets
    if headerlets:
        # Generate headerlets for each updated FLT image
        hlet_msg = _timestamp("Writing Headerlets started")
        for fname in _calfiles:
            frootname = fileutil.buildNewRootname(fname)
            hname = "%s_flt_hlet.fits"%frootname
            # Write out headerlet file used by astrodrizzle, however,
            # do not overwrite any that was already written out by alignimages
            if not os.path.exists(hname):
                hlet_msg += "Created Headerlet file %s \n"%hname
                try:
                    headerlet.write_headerlet(fname,'OPUS',output='flt', wcskey='PRIMARY',
                        author="OPUS",descrip="Default WCS from Pipeline Calibration",
                        attach=False,clobber=True,logging=False)
                except ValueError:
                    hlet_msg += _timestamp("SKIPPED: Headerlet not created for %s \n"%fname)
                    # update trailer file to log creation of headerlet files
        hlet_msg += _timestamp("Writing Headerlets completed")
        ftrl = open(_trlfile,'a')
        ftrl.write(hlet_msg)
        ftrl.close()

    # If processing was done in a temp working dir, restore results to original
    # processing directory, return to original working dir and remove temp dir
    if newpath:
        _restoreResults(new_processing_dir,orig_processing_dir)
        os.chdir(orig_processing_dir)
        _removeWorkingDir(new_processing_dir)

    # Provide feedback to user
    print(_final_msg)
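# A minimal usage sketch for this GAIA-aware variant of process(); the input
# filename is hypothetical. Setting align_to_gaia=False (or the corresponding
# environment variable interpreted above) skips the absolute-alignment step.
#
#   process('j8c0d1011_asn.fits', force=True, align_to_gaia=False)
Example #14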
    def writeFITS(self, template, sciarr, whtarr, ctxarr=None, versions=None, extlist=EXTLIST, overwrite=yes):
        """ Generate PyFITS objects for each output extension
            using the file given by 'template' for populating
            headers.

            The arrays will have the size specified by 'shape'.
        """

        if fileutil.findFile(self.output):
            if overwrite:
                print('Deleting previous output product: ',self.output)
                fileutil.removeFile(self.output)

            else:
                print('WARNING:  Output file ',self.output,' already exists and overwrite not specified!')
                print('Quitting... Please remove before resuming operations.')
                raise IOError

        # Default value for NEXTEND when 'build'== True
        nextend = 3
        if not self.build:
            nextend = 0
            if self.outweight:
                if overwrite:
                    if fileutil.findFile(self.outweight):
                        print('Deleting previous output WHT product: ',self.outweight)
                        fileutil.removeFile(self.outweight)
                else:
                    print('WARNING:  Output file ',self.outweight,' already exists and overwrite not specified!')
                    print('Quitting... Please remove before resuming operations.')
                    raise IOError


            if self.outcontext:
                if overwrite:
                    if fileutil.findFile(self.outcontext):
                        print('Deleting previous output CTX product: ',self.outcontext)
                        fileutil.removeFile(self.outcontext)
                else:
                    print('WARNING:  Output file ',self.outcontext,' already exists and overwrite not specified!')
                    print('Quitting... Please remove before resuming operations.')
                    raise IOError

        # Get default headers from multi-extension FITS file
        # If input data is not in MEF FITS format, it will return 'None'
        # and those headers will have to be generated from drizzle output
        # file FITS headers.
        # NOTE: These are HEADER objects, not HDUs
        prihdr,scihdr,errhdr,dqhdr = getTemplates(template,extlist)

        if prihdr is None:
            # Use readgeis to get header for use as Primary header.
            _indx = template.find('[')
            if _indx < 0:
                _data = template
            else:
                _data = template[:_indx]

            fpri = readgeis.readgeis(_data)
            prihdr = fpri[0].header.copy()
            fpri.close()
            del fpri


        # Setup primary header as an HDU ready for appending to output FITS file
        prihdu = pyfits.PrimaryHDU(header=prihdr,data=None)

        # Start by updating PRIMARY header keywords...
        prihdu.header.update('EXTEND',pyfits.TRUE,after='NAXIS')
        prihdu.header.update('NEXTEND',nextend)
        prihdu.header.update('FILENAME', self.output)

        # Update the ROOTNAME with the new value as well
        _indx = self.output.find('_drz')
        if _indx < 0:
            prihdu.header.update('ROOTNAME', self.output)
        else:
            prihdu.header.update('ROOTNAME', self.output[:_indx])


        # Get the total exposure time for the image
        # If not calculated by PyDrizzle and passed through
        # the pardict, then leave value from the template image.
        if self.texptime:
            prihdu.header.update('EXPTIME', self.texptime)
            prihdu.header.update('EXPSTART', self.expstart)
            prihdu.header.update('EXPEND', self.expend)

        # Update ASN_MTYP to reflect the fact that this is a product.
        # Currently hard-wired to always output 'PROD-DTH' as the MTYPE
        prihdu.header.update('ASN_MTYP', 'PROD-DTH')

        # Update DITHCORR calibration keyword if present
        # Remove when we can modify FITS headers in place...
        if 'DRIZCORR' in prihdu.header:
            prihdu.header.update('DRIZCORR','COMPLETE')
        if 'DITHCORR' in prihdu.header:
            prihdu.header.update('DITHCORR','COMPLETE')

        prihdu.header.update('NDRIZIM',len(self.parlist),
            comment='Drizzle, No. images drizzled onto output')

        self.addDrizKeywords(prihdu.header,versions)

        if scihdr:
            try:
                del scihdr['OBJECT']
            except KeyError:
                pass
            if 'CCDCHIP' in scihdr: scihdr.update('CCDCHIP','-999')
            if 'NCOMBINE' in scihdr:
                scihdr.update('NCOMBINE', self.parlist[0]['nimages'])

            # If a BUNIT value was provided, update the keyword with its comment
            if self.bunit is not None:
                scihdr.update('BUNIT',self.bunit,comment="Units of science product")

            if self.wcs:
                # Update WCS Keywords based on PyDrizzle product's value
                # since 'drizzle' itself doesn't update that keyword.
                scihdr.update('ORIENTAT',self.wcs.orient)
                scihdr.update('CD1_1',self.wcs.cd11)
                scihdr.update('CD1_2',self.wcs.cd12)
                scihdr.update('CD2_1',self.wcs.cd21)
                scihdr.update('CD2_2',self.wcs.cd22)
                scihdr.update('CRVAL1',self.wcs.crval1)
                scihdr.update('CRVAL2',self.wcs.crval2)
                scihdr.update('CRPIX1',self.wcs.crpix1)
                scihdr.update('CRPIX2',self.wcs.crpix2)
                scihdr.update('VAFACTOR',1.0)
                # Remove any reference to TDD correction
                if 'TDDALPHA' in scihdr:
                    del scihdr['TDDALPHA']
                    del scihdr['TDDBETA']
                # Remove '-SIP' from CTYPE for output product
                if scihdr['ctype1'].find('SIP') > -1:
                    scihdr.update('ctype1', scihdr['ctype1'][:-4])
                    scihdr.update('ctype2',scihdr['ctype2'][:-4])
                    # Remove SIP coefficients from DRZ product
                    for k in scihdr.items():
                        if (k[0][:2] in ['A_','B_']) or (k[0][:3] in ['IDC','SCD'] and k[0] != 'IDCTAB') or \
                        (k[0][:6] in ['SCTYPE','SCRVAL','SNAXIS','SCRPIX']):
                            del scihdr[k[0]]
                self.addPhotKeywords(scihdr,prihdu.header)


        ##########
        # Now, build the output file
        ##########
        if self.build:
            print('-Generating multi-extension output file: ',self.output)
            fo = pyfits.HDUList()

            # Add primary header to output file...
            fo.append(prihdu)

            hdu = pyfits.ImageHDU(data=sciarr,header=scihdr,name=extlist[0])
            fo.append(hdu)

            # Build WHT extension here, if requested...
            if errhdr:
                errhdr.update('CCDCHIP','-999')

            hdu = pyfits.ImageHDU(data=whtarr,header=errhdr,name=extlist[1])
            hdu.header.update('EXTVER',1)
            if self.wcs:
                # Update WCS Keywords based on PyDrizzle product's value
                # since 'drizzle' itself doesn't update that keyword.
                hdu.header.update('ORIENTAT',self.wcs.orient)
                hdu.header.update('CD1_1',self.wcs.cd11)
                hdu.header.update('CD1_2',self.wcs.cd12)
                hdu.header.update('CD2_1',self.wcs.cd21)
                hdu.header.update('CD2_2',self.wcs.cd22)
                hdu.header.update('CRVAL1',self.wcs.crval1)
                hdu.header.update('CRVAL2',self.wcs.crval2)
                hdu.header.update('CRPIX1',self.wcs.crpix1)
                hdu.header.update('CRPIX2',self.wcs.crpix2)
                hdu.header.update('VAFACTOR',1.0)

            fo.append(hdu)

            # Build CTX extension here
            # If there is only 1 plane, write it out as a 2-D extension
            if self.outcontext:
                if ctxarr.shape[0] == 1:
                    _ctxarr = ctxarr[0]
                else:
                    _ctxarr = ctxarr
            else:
                _ctxarr = None

            hdu = pyfits.ImageHDU(data=_ctxarr,header=dqhdr,name=extlist[2])
            hdu.header.update('EXTVER',1)
            if self.wcs:
                # Update WCS Keywords based on PyDrizzle product's value
                # since 'drizzle' itself doesn't update that keyword.
                hdu.header.update('ORIENTAT',self.wcs.orient)
                hdu.header.update('CD1_1',self.wcs.cd11)
                hdu.header.update('CD1_2',self.wcs.cd12)
                hdu.header.update('CD2_1',self.wcs.cd21)
                hdu.header.update('CD2_2',self.wcs.cd22)
                hdu.header.update('CRVAL1',self.wcs.crval1)
                hdu.header.update('CRVAL2',self.wcs.crval2)
                hdu.header.update('CRPIX1',self.wcs.crpix1)
                hdu.header.update('CRPIX2',self.wcs.crpix2)
                hdu.header.update('VAFACTOR',1.0)


            fo.append(hdu)

            fo.writeto(self.output)
            fo.close()
            del fo, hdu

        else:
            print('-Generating simple FITS output: ',self.outdata)
            fo = pyfits.HDUList()

            hdu = pyfits.PrimaryHDU(data=sciarr, header=prihdu.header)

            # Append remaining unique header keywords from the template SCI
            # header to the Primary header...
            if scihdr:
                for _card in scihdr.ascard:
                    if (_card.key not in RESERVED_KEYS and
                        _card.key not in hdu.header):
                        hdu.header.ascard.append(_card)
            for kw in ('PCOUNT', 'GCOUNT'):
                try:
                    del hdu.header[kw]
                except KeyError:
                    pass
            self.addPhotKeywords(hdu.header, prihdu.header)
            hdu.header.update('filename', self.outdata)

            # Add primary header to output file...
            fo.append(hdu)
            fo.writeto(self.outdata)
            del fo,hdu

            if self.outweight and whtarr is not None:
                # We need to build new PyFITS objects for each WHT array
                fwht = pyfits.HDUList()

                if errhdr:
                    errhdr.update('CCDCHIP','-999')

                hdu = pyfits.PrimaryHDU(data=whtarr, header=prihdu.header)

                # Append remaining unique header keywords from the template ERR
                # header to the Primary header...
                if errhdr:
                    for _card in errhdr.ascard:
                        if (_card.key not in RESERVED_KEYS and
                            _card.key not in hdu.header):
                            hdu.header.ascard.append(_card)
                hdu.header.update('filename', self.outweight)
                hdu.header.update('CCDCHIP','-999')
                if self.wcs:
                    # Update WCS Keywords based on PyDrizzle product's value
                    # since 'drizzle' itself doesn't update that keyword.
                    hdu.header.update('ORIENTAT',self.wcs.orient)
                    hdu.header.update('CD1_1',self.wcs.cd11)
                    hdu.header.update('CD1_2',self.wcs.cd12)
                    hdu.header.update('CD2_1',self.wcs.cd21)
                    hdu.header.update('CD2_2',self.wcs.cd22)
                    hdu.header.update('CRVAL1',self.wcs.crval1)
                    hdu.header.update('CRVAL2',self.wcs.crval2)
                    hdu.header.update('CRPIX1',self.wcs.crpix1)
                    hdu.header.update('CRPIX2',self.wcs.crpix2)
                    hdu.header.update('VAFACTOR',1.0)

                # Add primary header to output file...
                fwht.append(hdu)
                fwht.writeto(self.outweight)
                del fwht,hdu

            # If a context image was specified, build a PyFITS object
            # for it as well...
            if self.outcontext and ctxarr is not None:
                fctx = pyfits.HDUList()

                # If there is only 1 plane, write it out as a 2-D extension
                if ctxarr.shape[0] == 1:
                    _ctxarr = ctxarr[0]
                else:
                    _ctxarr = ctxarr

                hdu = pyfits.PrimaryHDU(data=_ctxarr, header=prihdu.header)

                # Append remaining unique header keywords from template DQ
                # header to Primary header...
                if dqhdr:
                    for _card in dqhdr.ascard:
                        if (_card.key not in RESERVED_KEYS and
                            _card.key not in hdu.header):
                            hdu.header.ascard.append(_card)
                hdu.header.update('filename', self.outcontext)
                if self.wcs:
                    # Update WCS Keywords based on PyDrizzle product's value
                    # since 'drizzle' itself doesn't update that keyword.
                    hdu.header.update('ORIENTAT',self.wcs.orient)
                    hdu.header.update('CD1_1',self.wcs.cd11)
                    hdu.header.update('CD1_2',self.wcs.cd12)
                    hdu.header.update('CD2_1',self.wcs.cd21)
                    hdu.header.update('CD2_2',self.wcs.cd22)
                    hdu.header.update('CRVAL1',self.wcs.crval1)
                    hdu.header.update('CRVAL2',self.wcs.crval2)
                    hdu.header.update('CRPIX1',self.wcs.crpix1)
                    hdu.header.update('CRPIX2',self.wcs.crpix2)
                    hdu.header.update('VAFACTOR',1.0)

                fctx.append(hdu)
                fctx.writeto(self.outcontext)
                del fctx,hdu
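
# Note: this older variant uses the long-deprecated pyfits Header.update()
# keyword API. A minimal sketch of the same edit in both styles, with a
# hypothetical header 'hdr' and illustrative values:
#
#   hdr.update('CRVAL1', 150.1163, comment='RA at reference pixel')  # old pyfits

from astropy.io import fits

hdr = fits.Header()
hdr['CRVAL1'] = (150.1163, 'RA at reference pixel')   # modern item assignment
hdr.set('CRVAL1', 150.1163, 'RA at reference pixel')  # or Header.set(), as used above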