Example 1
def checkForDuplicateInputs(rootnames):
    """
    Check input files specified in ASN table for duplicate versions with
    multiple valid suffixes (_flt and _flc, for example).
    """

    flist = []
    duplist = []

    for fname in rootnames:
        # Look for any recognized CTE-corrected products
        f1 = fileutil.buildRootname(fname, ext=['_flc.fits'])
        f2 = fileutil.buildRootname(fname)
        flist.append(f2)
        if os.path.exists(f1) and f1 != f2:
            # More than 1 valid input found for this rootname
            duplist.append(f1)

    return flist, duplist
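A minimal usage sketch, assuming this function lives in drizzlepac's processInput module (the rootnames are hypothetical):

from drizzlepac import processInput

# Rootnames as listed in an ASN table's 'order' entry
rootnames = ['j8cw03021', 'j8cw03022']
flist, duplist = processInput.checkForDuplicateInputs(rootnames)
if duplist:
    # Both _flt and _flc versions exist for at least one exposure
    print('CTE-corrected duplicates found:', duplist)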
Example 2
def define_output_name(fname):
    """Determines the output name based on input filename or drizzle keyword """
    phdr = pyfits.getheader(fname, ext=0)
    if 'D001DATA' in phdr:
        outname = phdr['D001DATA']
        # Strip any FITS extension specification, e.g. 'name.fits[sci,1]'.
        # Note: str.find() returns -1 (truthy) when '[' is absent, so the
        # membership test is the correct check here.
        if '[' in outname:
            outname = outname.split('[')[0]
    else:
        frootname = fname.split('_')[0]
        outname = fileutil.buildRootname(frootname)
        if outname is None:
            # make one from the header keywords
            outname = phdr['rootname']
    del phdr

    return outname
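A hedged usage sketch (the filename is hypothetical; pyfits is the legacy import name for what is now astropy.io.fits):

# Derive the drizzle output name from a calibrated exposure
outname = define_output_name('j8cw03021_flt.fits')
print('Output rootname:', outname)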
Example 3
def parse_input(input, prodonly=False, sort_wildcards=True):
    catlist = None

    if not isinstance(input, list) and \
       ('_asn' in input or '_asc' in input):
        # Input is an association table
        # Get the input files
        oldasndict = asnutil.readASNTable(input, prodonly=prodonly)
        filelist = [
            fileutil.buildRootname(fname) for fname in oldasndict['order']
        ]

    elif not isinstance(input, list) and \
         input[0] == '@':
        # input is an @ file
        f = open(input[1:])
        # Read the first line in order to determine whether
        # catalog files have been specified in a second column...
        line = f.readline()
        f.close()
        # Parse the @-file with irafglob to extract the input filename
        filelist = irafglob.irafglob(input, atfile=atfile_sci)
        print(line)
        # If there are additional columns for catalog files...
        if len(line.split()) > 1:
            # ...parse out the names of the catalog files as well
            catlist, catdict = parse_atfile_cat(input)
    elif isinstance(input, list):
        # input a python list
        filelist = []
        for fn in input:
            flist, output = parse_input(fn, prodonly=prodonly)
            # if wild-cards are given, sort for uniform usage:
            if '*' in fn and sort_wildcards:
                flist.sort()
            filelist += flist
    else:
        # input is either a string or something unrecognizable, so give it a try:
        try:
            filelist, output = parseinput.parseinput(input)
            # if wild-cards are given, sort for uniform usage:
            if '*' in input and sort_wildcards:
                filelist.sort()
        except IOError:
            raise

    return filelist, catlist
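A sketch of the three input styles this function accepts (all filenames hypothetical):

# 1. An association table: returns its member filenames
files, cats = parse_input('j8cw03020_asn.fits')

# 2. An @-file, optionally with a second column of catalog files
files, cats = parse_input('@inputs.txt')

# 3. A Python list, possibly containing wildcards
files, cats = parse_input(['*_flt.fits', '*_flc.fits'])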
Example 4
def wf2_pydr(filename):
    trl_start = "PYDRIZZLE STARTED------------ %s ------------\n" % time.asctime(
    )

    filename = fileutil.buildRootname(filename, ext=['_c0m.fits', '_c0f.fits'])
    try:
        hdr0 = pyfits.getheader(filename)
    except Exception:
        errormsg = "\n\nCan't find file %s \n" % filename
        raise IOError(errormsg)

    try:
        dodrizcorr = hdr0['DRIZCORR']
    except KeyError:
        print("Keyword DRIZCORR not found\n")
        dodrizcorr = None
    if isSupportedFilter(hdr0) and isSupportedOfftab(hdr0):
        useIDCModel = True
    else:
        useIDCModel = False

    if dodrizcorr is not None:
        if dodrizcorr == 'PERFORM':
            try:
                # update WCS information
                if useIDCModel:
                    p = pydrizzle.PyDrizzle(filename,
                                            bits_single=0,
                                            bits_final=0)
                else:
                    p = pydrizzle.PyDrizzle(filename,
                                            bits_single=0,
                                            bits_final=0,
                                            updatewcs=False,
                                            idckey=None)
                # resample to the scale of chip #3
                # this is done in the pipeline to keep the size of the
                # output drz product small
                chips = [member.chip for member in p.observation.members]
                """
                try:
                    chip_ind = chips.index('3')
                except ValueError:
                    chip_ind = 0
                oscale=p.observation.members[chip_ind].geometry.wcslin.pscale
                """
                # Reset the plate-scale to a fixed value of 0.1 arcsecond/pixel
                # but NOT if the PC was the only chip read out.
                if len(chips) > 1 or (len(chips) == 1 and chips[0] != '1'):
                    skf = pydrizzle.pydrizzle.SkyField()
                    skf.set(psize=0.1)
                    p.resetPars(skf)
                p.run(clean='yes')
                # Clean up any mask files which were created and not deleted by PyDrizzle
                fileutil.removeFile(glob.glob('wfpc2_inmask*.fits'))

            except:
                raise
            #update_header(filename)
            # Update header of output file, not input file
            update_header(p.output)
            trl_end = "END PYDRIZZLE------------ %s ------------\n" % time.asctime(
            )
        else:
            trl_end = 'PYDRIZZLE processing not requested\n'
            trl_end += "END PYDRIZZLE------------ %s ------------\n" % time.asctime(
            )
            return trl_start, trl_end
    else:
        trl_end = 'Keyword DRIZCORR missing from header.\n'
        trl_end += 'Pydrizzle did not run.\n'

    return trl_start, trl_end
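A minimal sketch of driving this for a single WFPC2 exposure (the rootname is hypothetical); the returned strings are meant for the processing trailer file:

trl_start, trl_end = wf2_pydr('u40x010hm')
with open('u40x010hm.tra', 'a') as trl:
    trl.write(trl_start)
    trl.write(trl_end)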
Example 5
def updateAsnTable(tabname,
                   rootname,
                   xsh=None,
                   ysh=None,
                   form="absolute",
                   rot=None,
                   scale=None,
                   frame="input",
                   units="pixels",
                   output=None,
                   mode="replace"):
    """ Updates an existing ASN table with shifts in arcseconds of RA and Dec.
        Parameters:
            tabname     - name of ASN table to be updated
            rootname    - name of image whose pointing needs to be updated
            xsh         - X shift from nominal to be applied
            ysh         - Y shift from nominal to be applied
            rot         - additional rotation to be applied
            scale       - scale factor to be applied
            form        - specifies how shifts are computed:
                            absolute (default) or delta
            frame       - specifies whether xsh/ysh are given in terms of
                            input (default) or output frame
            units       - specifies units of xsh/ysh:
                            pixels (default) or arcseconds
            output      - filename (with or without path) of output image
                          from which output pixels shifts were computed.
            mode        - replace or add shifts to existing values
                            'replace' (default) or 'sum'
        The task will:
            - open the ASN file (tabname)
            - search for the row matching the rootname provided by
                the user (img),
            - convert any pixel shifts to arcseconds
            - update the XOFFSET, YOFFSET, ROTATION columns
                (or create them if they don't already exist)

        This task assumes that the shifts given as 'input' pixels are
        already distortion-corrected, but not scaled/rotated to output frame.

        The conversion of shifts given in terms of output pixel coordinates
        requires the use of the WCS from the output frame in order to correctly
        take into account the output frame's pixel scale and orientation.

        Both input and output image specifications assume '.fits' and that
        the WCS information appropriate for this are in either [0] or [sci,1].
        If necessary a different extension can be specified in the filename.
        Also, the output filename can include a path which will be used.

    EXAMPLE:
            1. Update ASN table 'j8cw03020_asn.fits' for 'j8cw03021_crj.fits'
                which has pixel shifts of (10.3,4.72) in (undistorted) input
                frame.
            >>> updateasn.updateAsnTable('j8cw03020_asn.fits','j8cw03021',
                xsh=10.3,ysh=4.72,rot=0.14)
    NOTE:
        This task still does not correctly work with 'NaN' entries in
        the table.  For those values, it will not change the values at all.

    """
    if frame == 'output' and output is None:
        print('Please specify output image needed to compute deltas...')
        return

    print('updateAsnTable Version ', __version__)

    _tmpname = 'buildasn_' + tabname
    # Start by opening the ASN table
    _asntab = pyfits.open(tabname, 'update')

    if form == 'absolute' or form == 'relative':
        # absolute/relative shifts
        _colx = 'XOFFSET'
        _coly = 'YOFFSET'

    else:
        # delta shifts
        _colx = 'XDELTA'
        _coly = 'YDELTA'
    #
    # Work out what needs to be done to update ASN table
    # Do the XOFFSET/YOFFSET/ROTATION columns already exist or not?
    #
    _update = no
    _add_rot = yes
    _add_scale = yes
    _colnum = 0
    for name in _asntab[1].columns.names:
        if name == _colx:
            # Make the assumption that XOFFSET/YOFFSET and ROTATION
            # are all present if one is present...
            _update = yes
            break
        _colnum += 1

    if _update:
        _tab_units = _asntab[1].columns.units[_colnum]
        if _tab_units == '' or _tab_units is None: _tab_units = 'pixels'
    else:
        _tab_units = units

    for name in _asntab[1].columns.names:
        if name == 'ROTATION':
            _add_rot = no
            break
    for name in _asntab[1].columns.names:
        if name == 'SCALE':
            _add_scale = no
            break

    # Get the shift frame and refimage if present
    # If keywords don't exist, assume default of 'input' shifts
    # with no need for refimage.
    try:
        _tab_frame = _asntab[0].header['shframe']
    except KeyError:
        _tab_frame = 'input'

    try:
        _tab_refimg = _asntab[0].header['refimage']
    except KeyError:
        _tab_refimg = None

    # Find row which corresponds to input image
    _rownum = 0
    for row in _asntab[1].data:
        if rootname.find(row.field('MEMNAME')) > -1:
            _row = row
            break
        _rownum += 1
    print('Updating table row ', _rownum, ' for image: ', rootname)

    #
    # Determine where to get the distortion coefficients
    # create complete filename from given rootname
    img = fileutil.buildRootname(rootname)
    if img is None:
        print('No valid input image for filename ', rootname)
        raise ValueError

    _instrument = fileutil.getKeyword(img, 'instrume')
    if _instrument not in IDCKEYS: _instrument = 'detector'
    _key = IDCKEYS[_instrument]
    if xsh is None: xsh = 0.
    if ysh is None: ysh = 0.

    # If we have any trouble computing offsets, abort and
    # do NOT update table.
    try:
        # Start by determining values to be used to update the table.
        #
        # Need to compute: _delta_ra, _delta_dec (undistorted arcsec)
        if units == _tab_units or (xsh == 0. and ysh == 0.):
            _delta_xoff = xsh
            _delta_yoff = ysh
            if rot is None:
                rot = 0.
            if scale is None:
                scale = 1.0

        elif units.find('pixels') > -1 and _tab_units.find('arcsec') > -1:
            #
            # We need to convert the pixel values to arcseconds
            #
            _wcs = _getExposure(img, output, frame, _key)

            # If there is any additional rotation, account for it first
            if rot is not None:
                if scale is not None:
                    pscale = scale * _wcs.pscale
                else:
                    pscale = None
                _wcs.updateWCS(orient=_wcs.orient + rot, pixel_scale=pscale)

            # determine delta CRVALs from CRPIX+(xsh,ysh)
            _rd_delta = _wcs.xy2rd((_wcs.crpix1 + xsh, _wcs.crpix2 + ysh))

            # Now, convert from degrees to arcseconds
            #### Do we need to multiply by cos(dec) for delta(RA)???
            _delta_xoff = (_rd_delta[0] - _wcs.crval1) * 3600.
            _delta_yoff = (_rd_delta[1] - _wcs.crval2) * 3600.

        elif units.find('arcsec') > -1 and _tab_units.find('pixels') > -1:
            _wcs = _getExposure(img, _tab_refimg, _tab_frame, _key)
            # If there is any additional rotation, account for it first
            if rot is not None:
                if scale is not None:
                    pscale = scale * _wcs.pscale
                else:
                    pscale = None
                _wcs.updateWCS(orient=_wcs.orient + rot, pixel_scale=pscale)

            _xy_delta = _wcs.rd2xy((_wcs.crval1 + xsh, _wcs.crval2 + ysh))
            _delta_xoff = (_xy_delta[0] - _wcs.crpix1)
            _delta_yoff = (_xy_delta[1] - _wcs.crpix2)

    except Exception:
        print('ERROR: Error in updating table ', tabname)
        print('Closing existing table without updating it...')
        _asntab.close()
        return

    if _update:
        # Offset columns exist, just update the values in the table
        if mode == 'sum':
            _shift_xoff = _asntab[1].data.field(_colx)[_rownum]
            _shift_yoff = _asntab[1].data.field(_coly)[_rownum]
            _shift_rot = _asntab[1].data.field('ROTATION')[_rownum]
            _shift_scale = _asntab[1].data.field('SCALE')[_rownum]

            # Account for INDEF values in table
            # Logic: If set to INDEF, adding 1.0 will not change its value
            if _shift_xoff + 1.0 == _shift_xoff: _shift_xoff = 0.
            if _shift_yoff + 1.0 == _shift_yoff: _shift_yoff = 0.
            if _shift_rot + 1.0 == _shift_rot: _shift_rot = 0.
            if _shift_scale + 1.0 == _shift_scale: _shift_scale = 1.0
        else:
            _shift_xoff = 0.
            _shift_yoff = 0.
            _shift_rot = 0.
            _shift_scale = 1.
        _asntab[1].data.field(_colx)[_rownum] = _shift_xoff + _delta_xoff
        _asntab[1].data.field(_coly)[_rownum] = _shift_yoff + _delta_yoff
        if rot is not None:
            _asntab[1].data.field('ROTATION')[_rownum] = _shift_rot + rot
        if scale is not None:
            _asntab[1].data.field('SCALE')[_rownum] = _shift_scale * scale
    else:
        # We need to add the extra columns to the table
        # Build arrays for each additional column
        _numrows = len(_asntab[1].data)
        _xsh = N.zeros(_numrows, dtype=N.float32)
        _ysh = N.zeros(_numrows, dtype=N.float32)

        # Now update entry associated with this particular image
        _xsh[_rownum] = _delta_xoff
        _ysh[_rownum] = _delta_yoff

        # Build the column objects for the table
        #_xc,_yc,_rc = buildasn._makeOffsetColumns(_xsh,_ysh,_rot)
        _xc = pyfits.Column(name=_colx,
                            format='E',
                            unit=_tab_units,
                            array=_xsh)
        _yc = pyfits.Column(name=_coly,
                            format='E',
                            unit=_tab_units,
                            array=_ysh)
        _newcols = _asntab[1].columns + _xc
        _newcols += _yc
        #_asndefs = _asntab[1].get_coldefs()
        #_asndefs.add_col(_xc)
        #_asndefs.add_col(_yc)

        # If rotation column does NOT exist,
        if _add_rot:
            _rot = N.zeros(_numrows, dtype=N.float32)
            if rot is not None:
                _rot[_rownum] = rot
            _rc = pyfits.Column(name='ROTATION',
                                format='E',
                                unit='degrees',
                                array=_rot)
            #  add newly created column
            #_asndefs.add_col(_rc)
            _newcols += _rc
        else:
            # else, update column directly.
            _asntab[1].data.field('ROTATION')[_rownum] = rot

        # If scaling column does NOT exist,
        if _add_scale:
            _scale = N.zeros(_numrows, dtype=N.float32)
            if scale is not None:
                _scale[_rownum] = scale
            _sc = pyfits.Column(name='SCALE',
                                format='E',
                                unit='',
                                array=_scale)
            _newcols += _sc
        else:
            # else, update column directly.
            _asntab[1].data.field('SCALE')[_rownum] = scale

        _asnhdu = pyfits.new_table(_newcols)
        _asnhdu.writeto(_tmpname)
        # remove old data
        #del _asntab[1]
        # add table extension with old data and new columns
        #_asntab.append(_asnhdu)

    # Close and clean-up
    _asntab.close()
    del _asntab
    if os.path.exists(_tmpname):
        os.remove(tabname)
        os.rename(_tmpname, tabname)
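Beyond the docstring example, a hedged sketch of the output-frame path, which requires the output image so its WCS can be used for the pixel-to-arcsecond conversion (filenames hypothetical):

updateasn.updateAsnTable('j8cw03020_asn.fits', 'j8cw03021',
                         xsh=0.5, ysh=-0.2, frame='output',
                         output='j8cw03020_drz.fits', mode='sum')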
Example 6
def setCommonInput(configObj, createOutwcs=True):
    """
    The common interface interpreter for MultiDrizzle tasks. It not only runs
    'process_input()' but also 'createImageObject()' and 'defineOutput()' to
    fully set up all inputs for use with the rest of the MultiDrizzle steps,
    either as stand-alone tasks or internally to MultiDrizzle itself.

    Parameters
    ----------
    configObj : object
        configObj instance or simple dictionary of input parameters
    createOutwcs : bool
        whether to build and return the output WCS after processing the inputs

    Returns
    -------
    imageObjectList : list of imageObject objects
        list of imageObject instances, 1 for each input exposure
    outwcs : object
        imageObject instance defining the final output frame

    Notes
    -----
    At a minimum, the configObj instance (dictionary) should contain:
        configObj = {'input':None,'output':None }

    If provided, the configObj should contain the values of all the multidrizzle parameters
    as set by the user with TEAL. If no configObj is given, it will retrieve
    the default values automatically.  In either case, the values from the input_dict
    will be merged in with the configObj before being used by the rest of the
    code.

    Examples
    --------
    You can set *createOutwcs=False* for the cases where you only want the
    images processed and no output wcs information is necessary; as in:

    >>> imageObjectList,outwcs = processInput.processCommonInput(configObj)


    """
    # make sure 'updatewcs' is set to False when running from GUI or if missing
    # from configObj:
    if 'updatewcs' not in configObj:
        configObj['updatewcs'] = False

    if not createOutwcs or not configObj['coeffs']:
        # we're probably just working on single images here
        configObj['updatewcs'] = False

    # maybe we can chunk this part up some more so that we can call just the
    # parts we want

    # Interpret input, read and convert and update input files, then return
    # list of input filenames and derived output filename
    asndict, ivmlist, output = process_input(
            configObj['input'], configObj['output'],
            updatewcs=configObj['updatewcs'], wcskey=configObj['wcskey'],
            **configObj['STATE OF INPUT FILES'])

    if not asndict:
        return None, None
    # convert the filenames from asndict into a list of full filenames
    files = [fileutil.buildRootname(f) for f in asndict['order']]
    original_files = asndict['original_file_names']

    # interpret MDRIZTAB, if specified, and update configObj accordingly
    # This can be done here because MDRIZTAB does not include values for
    # input, output, or updatewcs.
    if 'mdriztab' in configObj and configObj['mdriztab']:
        print("Reading in MDRIZTAB parameters for {} files".format(len(files)))
        mdriztab_dict = mdzhandler.getMdriztabParameters(files)

        # Update configObj with values from mpars
        cfgpars.mergeConfigObj(configObj, mdriztab_dict)

    # Convert interpreted list of input files from process_input into a list
    # of imageObject instances for use by the MultiDrizzle tasks.
    instrpars = configObj['INSTRUMENT PARAMETERS']
    # pass in 'proc_unit' to initialize unit conversions as necessary
    instrpars['proc_unit'] = configObj['proc_unit']

    undistort = True
    if not configObj['coeffs']:
        undistort = False

    # determine whether parallel processing will be performed
    use_parallel = False
    if util.can_parallel:
        # look to see whether steps which can be run using multiprocessing
        # have been turned on
        for stepnum in parallel_steps:
            sname = util.getSectionName(configObj,stepnum[0])
            if configObj[sname][stepnum[1]]:
                use_parallel = True
                break

    # interpret all 'bits' related parameters and convert them to integers
    configObj['resetbits'] = interpret_bit_flags(configObj['resetbits'])
    step3name = util.getSectionName(configObj,3)
    configObj[step3name]['driz_sep_bits'] = interpret_bit_flags(
                                        configObj[step3name]['driz_sep_bits']
    )
    step7name = util.getSectionName(configObj,7)
    configObj[step7name]['final_bits'] = interpret_bit_flags(
                                        configObj[step7name]['final_bits']
    )

    # Verify any refimage parameters to be used
    step3aname = util.getSectionName(configObj,'3a')
    if not util.verifyRefimage(configObj[step3aname]['driz_sep_refimage']):
        msg = 'No refimage with WCS found!\n '+\
        ' This could be caused by one of 2 problems:\n'+\
        '   * filename does not specify an extension with a valid WCS.\n'+\
        '   * can not find the file.\n'+\
        'Please check the filename specified in the "refimage" parameter.'
        print(textutil.textbox(msg))
        return None, None
    step7aname = util.getSectionName(configObj,'7a')
    if not util.verifyRefimage(configObj[step7aname]['final_refimage']):
        msg = 'No refimage with WCS found!\n '+\
        ' This could be caused by one of 2 problems:\n'+\
        '   * filename does not specify an extension with a valid WCS.\n'+\
        '   * can not find the file.\n'+\
        'Please check the filename specified in the "refimage" parameter.'
        print(textutil.textbox(msg))
        return None, None


    # Build imageObject list for all the valid, shift-updated input files
    log.info('-Creating imageObject List as input for processing steps.')
    if 'in_memory' in configObj:
        virtual = configObj['in_memory']
    else:
        virtual = False

    imageObjectList = createImageObjectList(files, instrpars,
                                            group=configObj['group'],
                                            undistort=undistort,
                                            inmemory=virtual)

    # Add original file names as "hidden" attributes of imageObject
    assert len(original_files) == len(imageObjectList)  # TODO: remove after extensive testing
    for i in range(len(imageObjectList)):
        imageObjectList[i]._original_file_name = original_files[i]

    # apply context parameter
    applyContextPar(imageObjectList, configObj['context'])

    # reset DQ bits if requested by user
    resetDQBits(imageObjectList, cr_bits_value=configObj['resetbits'])

    # Add info about input IVM files at this point to the imageObjectList
    addIVMInputs(imageObjectList, ivmlist)

    if createOutwcs:
        log.info('-Creating output WCS.')

        # Build output WCS and update imageObjectList with output WCS info
        outwcs = wcs_functions.make_outputwcs(imageObjectList, output,
                                              configObj=configObj, perfect=True)
        outwcs.final_wcs.printwcs()
    else:
        outwcs = None

    try:
        # Provide user with some information on resource usage for this run
        # raises ValueError Exception in interactive mode and user quits
        num_cores = configObj.get('num_cores') if use_parallel else 1

        reportResourceUsage(imageObjectList, outwcs, num_cores)
    except ValueError:
        imageObjectList = None

    return imageObjectList, outwcs
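A minimal sketch, assuming direct import; a real configObj carries the full set of MultiDrizzle parameters (including the 'STATE OF INPUT FILES' and 'INSTRUMENT PARAMETERS' sections used above), so this dictionary is only schematic:

configObj = {'input': '*_flt.fits', 'output': 'final',
             'coeffs': True, 'wcskey': '', 'proc_unit': 'native'}
imageObjectList, outwcs = setCommonInput(configObj)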
Example 7
def setCommonInput(configObj, createOutwcs=True):
    """
    The common interface interpreter for MultiDrizzle tasks. It not only runs
    'process_input()' but also 'createImageObject()' and 'defineOutput()' to
    fully set up all inputs for use with the rest of the MultiDrizzle steps,
    either as stand-alone tasks or internally to MultiDrizzle itself.

    Parameters
    ----------
    configObj : object
        configObj instance or simple dictionary of input parameters
    createOutwcs : bool
        whether to build and return the output WCS after processing the inputs

    Returns
    -------
    imageObjectList : list of imageObject objects
        list of imageObject instances, 1 for each input exposure
    outwcs : object
        imageObject instance defining the final output frame

    Notes
    -----
    At a minimum, the configObj instance (dictionary) should contain:
        configObj = {'input':None,'output':None }

    If provided, the configObj should contain the values of all the multidrizzle parameters
    as set by the user with TEAL. If no configObj is given, it will retrieve
    the default values automatically.  In either case, the values from the input_dict
    will be merged in with the configObj before being used by the rest of the
    code.

    Examples
    --------
    You can set *createOutwcs=False* for the cases where you only want the
    images processed and no output wcs information is necessary; as in:

    >>> imageObjectList,outwcs = processInput.processCommonInput(configObj)


    """
    # make sure 'updatewcs' is set to False when running from GUI or if missing
    # from configObj:
    if 'updatewcs' not in configObj:
        configObj['updatewcs'] = False

    if not createOutwcs or not configObj['coeffs']:
        # we're probably just working on single images here
        configObj['updatewcs'] = False

    # maybe we can chunk this part up some more so that we can call just the
    # parts we want

    # Interpret input, read and convert and update input files, then return
    # list of input filenames and derived output filename
    asndict, ivmlist, output = process_input(
        configObj['input'],
        configObj['output'],
        updatewcs=configObj['updatewcs'],
        wcskey=configObj['wcskey'],
        **configObj['STATE OF INPUT FILES'])

    if not asndict:
        return None, None
    # convert the filenames from asndict into a list of full filenames
    files = [fileutil.buildRootname(f) for f in asndict['order']]
    original_files = asndict['original_file_names']

    # interpret MDRIZTAB, if specified, and update configObj accordingly
    # This can be done here because MDRIZTAB does not include values for
    # input, output, or updatewcs.
    if 'mdriztab' in configObj and configObj['mdriztab']:
        print("Reading in MDRIZTAB parameters for {} files".format(len(files)))
        mdriztab_dict = mdzhandler.getMdriztabParameters(files)

        # Update configObj with values from mpars
        cfgpars.mergeConfigObj(configObj, mdriztab_dict)

    # Convert interpreted list of input files from process_input into a list
    # of imageObject instances for use by the MultiDrizzle tasks.
    instrpars = configObj['INSTRUMENT PARAMETERS']
    # pass in 'proc_unit' to initialize unit conversions as necessary
    instrpars['proc_unit'] = configObj['proc_unit']

    undistort = True
    if not configObj['coeffs']:
        undistort = False

    # determine whether parallel processing will be performed
    use_parallel = False
    if util.can_parallel:
        # look to see whether steps which can be run using multiprocessing
        # have been turned on
        for stepnum in parallel_steps:
            sname = util.getSectionName(configObj, stepnum[0])
            if configObj[sname][stepnum[1]]:
                use_parallel = True
                break

    # interpret all 'bits' related parameters and convert them to integers
    configObj['resetbits'] = interpret_bit_flags(configObj['resetbits'])
    step3name = util.getSectionName(configObj, 3)
    configObj[step3name]['driz_sep_bits'] = interpret_bit_flags(
        configObj[step3name]['driz_sep_bits'])
    step4name = util.getSectionName(configObj, 4)
    if len(files) > 5 and 'minmed' in configObj[step4name]['combine_type']:
        msg = '"minmed" is highly recommended for three images, \n'+\
        ' and is good for four to six images, \n'+\
        ' but should be avoided for ten or more images.\n'
        print(textutil.textbox(msg))

    step7name = util.getSectionName(configObj, 7)
    configObj[step7name]['final_bits'] = interpret_bit_flags(
        configObj[step7name]['final_bits'])

    # Verify any refimage parameters to be used
    step3aname = util.getSectionName(configObj, '3a')
    if not util.verifyRefimage(configObj[step3aname]['driz_sep_refimage']):
        msg = 'No refimage with WCS found!\n '+\
        ' This could be caused by one of 2 problems:\n'+\
        '   * filename does not specify an extension with a valid WCS.\n'+\
        '   * can not find the file.\n'+\
        'Please check the filename specified in the "refimage" parameter.'
        print(textutil.textbox(msg))
        return None, None
    step7aname = util.getSectionName(configObj, '7a')
    if not util.verifyRefimage(configObj[step7aname]['final_refimage']):
        msg = 'No refimage with WCS found!\n '+\
        ' This could be caused by one of 2 problems:\n'+\
        '   * filename does not specify an extension with a valid WCS.\n'+\
        '   * can not find the file.\n'+\
        'Please check the filename specified in the "refimage" parameter.'
        print(textutil.textbox(msg))
        return None, None

    # Build imageObject list for all the valid, shift-updated input files
    log.info('-Creating imageObject List as input for processing steps.')
    if 'in_memory' in configObj:
        virtual = configObj['in_memory']
    else:
        virtual = False

    imageObjectList = createImageObjectList(files,
                                            instrpars,
                                            output=asndict['output'],
                                            group=configObj['group'],
                                            undistort=undistort,
                                            inmemory=virtual)

    # Add original file names as "hidden" attributes of imageObject
    assert len(original_files) == len(imageObjectList)  # TODO: remove after extensive testing
    for i in range(len(imageObjectList)):
        imageObjectList[i]._original_file_name = original_files[i]

    # apply context parameter
    applyContextPar(imageObjectList, configObj['context'])

    # reset DQ bits if requested by user
    resetDQBits(imageObjectList, cr_bits_value=configObj['resetbits'])

    # Add info about input IVM files at this point to the imageObjectList
    addIVMInputs(imageObjectList, ivmlist)

    if createOutwcs:
        log.info('-Creating output WCS.')

        # Build output WCS and update imageObjectList with output WCS info
        outwcs = wcs_functions.make_outputwcs(imageObjectList,
                                              output,
                                              configObj=configObj,
                                              perfect=True)
        outwcs.final_wcs.printwcs()
    else:
        outwcs = None

    try:
        # Provide user with some information on resource usage for this run
        # raises ValueError Exception in interactive mode and user quits
        num_cores = configObj.get('num_cores') if use_parallel else 1

        reportResourceUsage(imageObjectList, outwcs, num_cores)
    except ValueError:
        imageObjectList = None

    return imageObjectList, outwcs
Example 8
def process_input(input, output=None, ivmlist=None, updatewcs=True, prodonly=False, shiftfile=None):

    ivmlist = None
    oldasndict = None

    if not isinstance(input, list) and \
       ('_asn' in input or '_asc' in input):
        # Input is an association table
        # Get the input files, and run makewcs on them
        oldasndict = asnutil.readASNTable(input, prodonly=prodonly)
        if not output:
            output = oldasndict['output']

        filelist = [fileutil.buildRootname(fname) for fname in oldasndict['order']]

    elif not isinstance(input, list) and \
         input[0] == '@':
        # input is an @ file
        f = open(input[1:])
        # Read the first line in order to determine whether
        # IVM files have been specified in a second column...
        line = f.readline()
        f.close()
        # Parse the @-file with irafglob to extract the input filename
        filelist = irafglob.irafglob(input, atfile=atfile_sci)
        # If there is a second column...
        if len(line.split()) == 2:
            # ...parse out the names of the IVM files as well
            ivmlist = irafglob.irafglob(input, atfile=atfile_ivm)
    else:
        #input is a string or a python list
        try:
            filelist, output = parseinput.parseinput(input, outputname=output)
            #filelist.sort()
        except IOError:
            raise

    # sort the list of input files
    # this ensures the list of input files has the same order on all platforms
    # it can otherwise differ because listdir() returns entries in inode order,
    # not alphabetical order
    filelist.sort()
    newfilelist, ivmlist = checkFiles(filelist, ivmlist)


    if not newfilelist:
        buildEmptyDRZ(input, output)
        return None, None, output

    #make an asn table at the end
    if updatewcs:
        pydr_input = runmakewcs(newfilelist)
    else:
        pydr_input = newfilelist

    # AsnTable will handle the case when output==None
    if not oldasndict:
        oldasndict = asnutil.ASNTable(pydr_input, output=output)
        oldasndict.create()

    if shiftfile:
        oldasndict.update(shiftfile=shiftfile)

    asndict = update_member_names(oldasndict, pydr_input)

    # Build output filename
    drz_extn = '_drz.fits'
    for img in newfilelist:
        # special case logic to automatically recognize when _flc.fits files
        # are provided as input and produce a _drc.fits file instead
        if '_flc.fits' in img:
            drz_extn = '_drc.fits'
            break

    if output in [None, '']:
        output = fileutil.buildNewRootname(asndict['output'],
                                           extn=drz_extn)
    else:
        if '.fits' in output.lower():
            pass
        elif drz_extn[:4] not in output.lower():
            output = fileutil.buildNewRootname(output, extn=drz_extn)

    print('Setting up output name: ', output)

    return asndict, ivmlist, output
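A hedged usage sketch (filename hypothetical): the returned asndict and ivmlist feed the later image-object setup, and output is the derived _drz/_drc product name:

asndict, ivmlist, output = process_input('j8cw03020_asn.fits', updatewcs=True)
print('Drizzle product will be:', output)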
Example 9
def process(inFile,
            force=False,
            newpath=None,
            inmemory=False,
            num_cores=None,
            headerlets=True):
    """ Run astrodrizzle on input file/ASN table
        using default values for astrodrizzle parameters.
    """
    # We only need to import this package if a user runs the task
    import drizzlepac
    from drizzlepac import processInput  # used for creating new ASNs for _flc inputs
    import stwcs

    if headerlets:
        from stwcs.wcsutil import headerlet

    # Open the input file
    try:
        # Make sure given filename is complete and exists...
        inFilename = fileutil.buildRootname(inFile, ext=['.fits'])
        if not os.path.exists(inFilename):
            print("ERROR: Input file - %s - does not exist." % inFilename)
            return
    except TypeError:
        print("ERROR: Inappropriate input file.")
        return

    #If newpath was specified, move all files to that directory for processing
    if newpath:
        orig_processing_dir = os.getcwd()
        new_processing_dir = _createWorkingDir(newpath, inFilename)
        _copyToNewWorkingDir(new_processing_dir, inFilename)
        os.chdir(new_processing_dir)

    # Initialize for later use...
    _mname = None
    _new_asn = None
    _calfiles = []

    # Check input file to see if [DRIZ/DITH]CORR is set to PERFORM
    if '_asn' in inFilename:
        # We are working with an ASN table.
        # Use asnutil code to extract filename
        inFilename = _lowerAsn(inFilename)
        _new_asn = [inFilename]
        _asndict = asnutil.readASNTable(inFilename, None, prodonly=False)
        _cal_prodname = _asndict['output'].lower()
        _fname = fileutil.buildRootname(_cal_prodname, ext=['_drz.fits'])

        # Retrieve the first member's rootname for possible use later
        _fimg = fits.open(inFilename)
        for name in _fimg[1].data.field('MEMNAME'):
            if name[-1] != '*':
                _mname = name.split('\0', 1)[0].lower()
                break
        _fimg.close()
        del _fimg

    else:
        # Check to see if input is a _RAW file
        # If it is, strip off the _raw.fits extension...
        _indx = inFilename.find('_raw')
        if _indx < 0: _indx = len(inFilename)
        # ... and build the CALXXX product rootname.
        _mname = fileutil.buildRootname(inFilename[:_indx])
        _cal_prodname = inFilename[:_indx]
        # Reset inFilename to correspond to appropriate input for
        # drizzle: calibrated product name.
        inFilename = _mname

        if _mname is None:
            errorMsg = 'Could not find calibrated product!'
            raise Exception(errorMsg)

    # Create trailer filenames based on ASN output filename or
    # on input name for single exposures
    if '_raw' in inFile:
        # Output trailer file to RAW file's trailer
        _trlroot = inFile[:inFile.find('_raw')]
    elif '_asn' in inFile:
        # Output trailer file to ASN file's trailer, not product's trailer
        _trlroot = inFile[:inFile.find('_asn')]
    else:
        # Default: trim off last suffix of input filename
        # and replacing with .tra
        _indx = inFile.rfind('_')
        if _indx > 0:
            _trlroot = inFile[:_indx]
        else:
            _trlroot = inFile

    _trlfile = _trlroot + '.tra'

    # Open product and read keyword value
    # Check to see if product already exists...
    dkey = 'DRIZCORR'
    # ...if product does NOT exist, interrogate input file
    # to find out whether 'dcorr' has been set to PERFORM
    # Check if user wants to process again regardless of DRIZCORR keyword value
    if force:
        dcorr = 'PERFORM'
    else:
        if _mname:
            _fimg = fits.open(fileutil.buildRootname(_mname,
                                                     ext=['_raw.fits']))
            _phdr = _fimg['PRIMARY'].header
            if dkey in _phdr:
                dcorr = _phdr[dkey]
            else:
                dcorr = None
            _fimg.close()
            del _fimg
        else:
            dcorr = None

    time_str = _getTime()
    _tmptrl = _trlroot + '_tmp.tra'
    _drizfile = _trlroot + '_pydriz'
    _drizlog = _drizfile + ".log"  # the '.log' gets added automatically by astrodrizzle
    if dcorr == 'PERFORM':
        if '_asn.fits' not in inFilename:
            # Working with a singleton
            # However, we always want to make sure we always use
            # a calibrated product as input, if available.
            _infile = fileutil.buildRootname(_cal_prodname)
            _infile_flc = fileutil.buildRootname(_cal_prodname,
                                                 ext=['_flc.fits'])

            _cal_prodname = _infile
            _inlist = _calfiles = [_infile]

            # Add CTE corrected filename as additional input if present
            if os.path.exists(_infile_flc) and _infile_flc != _infile:
                _inlist.append(_infile_flc)

        else:
            # Working with an ASN table...
            _infile = inFilename
            flist, duplist = processInput.checkForDuplicateInputs(
                _asndict['order'])
            _calfiles = flist
            if len(duplist) > 0:
                origasn = processInput.changeSuffixinASN(inFilename, 'flt')
                dupasn = processInput.changeSuffixinASN(inFilename, 'flc')
                _inlist = [origasn, dupasn]
            else:
                _inlist = [_infile]
            # We want to keep the original specification of the calibration
            # product name, though, not a lower-case version...
            _cal_prodname = inFilename
            _new_asn.extend(_inlist)  # kept so we can delete it when finished

        # Run astrodrizzle and send its processing statements to _trlfile
        _pyver = drizzlepac.astrodrizzle.__version__

        for _infile in _inlist:  # Run astrodrizzle for all inputs
            # Create trailer marker message for start of astrodrizzle processing
            _trlmsg = _timestamp('astrodrizzle started ')
            _trlmsg = _trlmsg + __trlmarker__
            _trlmsg = _trlmsg + '%s: Processing %s with astrodrizzle Version %s\n' % (
                time_str, _infile, _pyver)
            print(_trlmsg)

            # Write out trailer comments to trailer file...
            ftmp = open(_tmptrl, 'w')
            ftmp.writelines(_trlmsg)
            ftmp.close()
            _appendTrlFile(_trlfile, _tmptrl)

            _pyd_err = _trlroot + '_pydriz.stderr'

            try:
                b = drizzlepac.astrodrizzle.AstroDrizzle(input=_infile,
                                                         runfile=_drizfile,
                                                         configobj='defaults',
                                                         in_memory=inmemory,
                                                         num_cores=num_cores,
                                                         **pipeline_pars)
            except Exception as errorobj:
                _appendTrlFile(_trlfile, _drizlog)
                _appendTrlFile(_trlfile, _pyd_err)
                _ftrl = open(_trlfile, 'a')
                _ftrl.write(
                    'ERROR: Could not complete astrodrizzle processing of %s.\n'
                    % _infile)
                _ftrl.write(str(sys.exc_info()[0]) + ': ')
                _ftrl.writelines(str(errorobj))
                _ftrl.write('\n')
                _ftrl.close()
                print(
                    'ERROR: Could not complete astrodrizzle processing of %s.'
                    % _infile)
                raise Exception(str(errorobj))

            # Now, append comments created by PyDrizzle to CALXXX trailer file
            print('Updating trailer file %s with astrodrizzle comments.' %
                  _trlfile)
            _appendTrlFile(_trlfile, _drizlog)

        # Save this for when astropy.io.fits can modify a file 'in-place'
        # Update calibration switch
        _fimg = fits.open(_cal_prodname, mode='update')
        _fimg['PRIMARY'].header[dkey] = 'COMPLETE'
        _fimg.close()
        del _fimg

        # Enforce pipeline convention of all lower-case product
        # names
        _prodlist = glob.glob('*drz.fits')
        for _prodname in _prodlist:
            _plower = _prodname.lower()
            if _prodname != _plower: os.rename(_prodname, _plower)

    else:
        # Create default trailer file messages when astrodrizzle is not
        # run on a file.  This will typically apply only to BIAS,DARK
        # and other reference images.
        # Start by building up the message...
        _trlmsg = _timestamp('astrodrizzle skipped ')
        _trlmsg = _trlmsg + __trlmarker__
        _trlmsg = _trlmsg + '%s: astrodrizzle processing not requested for %s.\n' % (
            time_str, inFilename)
        _trlmsg = _trlmsg + '       astrodrizzle will not be run at this time.\n'
        print(_trlmsg)

        # Write message out to temp file and append it to full trailer file
        ftmp = open(_tmptrl, 'w')
        ftmp.writelines(_trlmsg)
        ftmp.close()
        _appendTrlFile(_trlfile, _tmptrl)

    _fmsg = None
    # Append final timestamp to trailer file...
    _final_msg = '%s: Finished processing %s \n' % (time_str, inFilename)
    _final_msg += _timestamp('astrodrizzle completed ')
    _trlmsg += _final_msg
    ftmp = open(_tmptrl, 'w')
    ftmp.writelines(_trlmsg)
    ftmp.close()
    _appendTrlFile(_trlfile, _tmptrl)

    # If we created a new ASN table, we need to remove it
    if _new_asn is not None:
        for _name in _new_asn:
            fileutil.removeFile(_name)

    # Clean up any generated OrIg_files directory
    if os.path.exists("OrIg_files"):
        # check to see whether this directory is empty
        flist = glob.glob('OrIg_files/*.fits')
        if len(flist) == 0:
            os.rmdir("OrIg_files")
        else:
            print(
                'OrIg_files directory NOT removed as it still contained images...'
            )
    if headerlets:
        # Generate headerlets for each updated FLT image
        hlet_msg = _timestamp("Writing Headerlets started")
        for fname in _calfiles:
            frootname = fileutil.buildNewRootname(fname)
            hname = "%s_flt_hlet.fits" % frootname
            hlet_msg += "Created Headerlet file %s \n" % hname
            try:
                headerlet.write_headerlet(
                    fname,
                    'OPUS',
                    output='flt',
                    wcskey='PRIMARY',
                    author="OPUS",
                    descrip="Default WCS from Pipeline Calibration",
                    attach=False,
                    clobber=True,
                    logging=False)
            except ValueError:
                hlet_msg += _timestamp(
                    "SKIPPED: Headerlet not created for %s \n" % fname)
                # update trailer file to log creation of headerlet files
        hlet_msg += _timestamp("Writing Headerlets completed")
        ftrl = open(_trlfile, 'a')
        ftrl.write(hlet_msg)
        ftrl.close()

    # If processing was done in a temp working dir, restore results to original
    # processing directory, return to original working dir and remove temp dir
    if newpath:
        _restoreResults(new_processing_dir, orig_processing_dir)
        os.chdir(orig_processing_dir)
        _removeWorkingDir(new_processing_dir)

    # Provide feedback to user
    print(_final_msg)
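A minimal sketch of a pipeline-style invocation (filename hypothetical); force=True drives processing even when the DRIZCORR keyword is not set to PERFORM:

process('j8cw03020_asn.fits', force=True, num_cores=4, headerlets=False)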
Example 10
def process(inFile, force=False, newpath=None, inmemory=False, num_cores=None,
            headerlets=True):
    """ Run astrodrizzle on input file/ASN table
        using default values for astrodrizzle parameters.
    """
    # We only need to import this package if a user runs the task
    import drizzlepac
    from drizzlepac import processInput # used for creating new ASNs for _flc inputs
    import stwcs

    if headerlets:
        from stwcs.wcsutil import headerlet

    # Open the input file
    try:
        # Make sure given filename is complete and exists...
        inFilename = fileutil.buildRootname(inFile,ext=['.fits'])
        if not os.path.exists(inFilename):
            print("ERROR: Input file - %s - does not exist." % inFilename)
            return
    except TypeError:
        print("ERROR: Inappropriate input file.")
        return

    #If newpath was specified, move all files to that directory for processing
    if newpath:
        orig_processing_dir = os.getcwd()
        new_processing_dir = _createWorkingDir(newpath,inFilename)
        _copyToNewWorkingDir(new_processing_dir,inFilename)
        os.chdir(new_processing_dir)

    # Initialize for later use...
    _mname = None
    _new_asn = None
    _calfiles = []

    # Identify WFPC2 inputs to account for differences in WFPC2 inputs
    wfpc2_input = fits.getval(inFilename, 'instrume') == 'WFPC2'
    cal_ext = None

    # Check input file to see if [DRIZ/DITH]CORR is set to PERFORM
    if '_asn' in inFilename:
        # We are working with an ASN table.
        # Use asnutil code to extract filename
        inFilename = _lowerAsn(inFilename)
        _new_asn = [inFilename]
        _asndict = asnutil.readASNTable(inFilename,None,prodonly=False)
        _cal_prodname = _asndict['output'].lower()
        _fname = fileutil.buildRootname(_cal_prodname,ext=['_drz.fits'])

        # Retrieve the first member's rootname for possible use later
        _fimg = fits.open(inFilename, memmap=False)
        for name in _fimg[1].data.field('MEMNAME'):
            if name[-1] != '*':
                _mname = name.split('\0', 1)[0].lower()
                break
        _fimg.close()
        del _fimg

    else:
        # Check to see if input is a _RAW file
        # If it is, strip off the _raw.fits extension...
        _indx = inFilename.find('_raw')
        if _indx < 0: _indx = len(inFilename)
        # ... and build the CALXXX product rootname.
        if wfpc2_input:
            # force code to define _c0m file as calibrated product to be used
            cal_ext = ['_c0m.fits']
        _mname = fileutil.buildRootname(inFilename[:_indx], ext=cal_ext)

        _cal_prodname = inFilename[:_indx]
        # Reset inFilename to correspond to appropriate input for
        # drizzle: calibrated product name.
        inFilename = _mname

        if _mname is None:
            errorMsg = 'Could not find calibrated product!'
            raise Exception(errorMsg)

    # Create trailer filenames based on ASN output filename or
    # on input name for single exposures
    if '_raw' in inFile:
        # Output trailer file to RAW file's trailer
        _trlroot = inFile[:inFile.find('_raw')]
    elif '_asn' in inFile:
        # Output trailer file to ASN file's trailer, not product's trailer
        _trlroot = inFile[:inFile.find('_asn')]
    else:
        # Default: trim off last suffix of input filename
        # and replacing with .tra
        _indx = inFile.rfind('_')
        if _indx > 0:
            _trlroot = inFile[:_indx]
        else:
            _trlroot = inFile

    _trlfile = _trlroot + '.tra'

    # Open product and read keyword value
    # Check to see if product already exists...
    dkey = 'DRIZCORR'
    # ...if product does NOT exist, interrogate input file
    # to find out whether 'dcorr' has been set to PERFORM
    # Check if user wants to process again regardless of DRIZCORR keyword value
    if force:
        dcorr = 'PERFORM'
    else:
        if _mname:
            _fimg = fits.open(fileutil.buildRootname(_mname,ext=['_raw.fits']), memmap=False)
            _phdr = _fimg['PRIMARY'].header
            if dkey in _phdr:
                dcorr = _phdr[dkey]
            else:
                dcorr = None
            _fimg.close()
            del _fimg
        else:
            dcorr = None

    time_str = _getTime()
    _tmptrl = _trlroot + '_tmp.tra'
    _drizfile = _trlroot + '_pydriz'
    _drizlog = _drizfile + ".log" # the '.log' gets added automatically by astrodrizzle
    if dcorr == 'PERFORM':
        if '_asn.fits' not in inFilename:
            # Working with a singleton
            # However, we always want to make sure we always use
            # a calibrated product as input, if available.
            _infile = fileutil.buildRootname(_cal_prodname, ext=cal_ext)
            _infile_flc = fileutil.buildRootname(_cal_prodname,ext=['_flc.fits'])

            _cal_prodname = _infile
            _inlist = _calfiles = [_infile]

            # Add CTE corrected filename as additional input if present
            if os.path.exists(_infile_flc) and _infile_flc != _infile:
                _inlist.append(_infile_flc)

        else:
            # Working with an ASN table...
            _infile = inFilename
            flist,duplist = processInput.checkForDuplicateInputs(_asndict['order'])
            _calfiles = flist
            if len(duplist) > 0:
                origasn = processInput.changeSuffixinASN(inFilename,'flt')
                dupasn = processInput.changeSuffixinASN(inFilename,'flc')
                _inlist = [origasn,dupasn]
            else:
                _inlist = [_infile]
            # We want to keep the original specification of the calibration
            # product name, though, not a lower-case version...
            _cal_prodname = inFilename
            _new_asn.extend(_inlist) # kept so we can delete it when finished


        # Run astrodrizzle and send its processing statements to _trlfile
        _pyver = drizzlepac.astrodrizzle.__version__

        for _infile in _inlist: # Run astrodrizzle for all inputs
            # Create trailer marker message for start of astrodrizzle processing
            _trlmsg = _timestamp('astrodrizzle started ')
            _trlmsg = _trlmsg+ __trlmarker__
            _trlmsg = _trlmsg + '%s: Processing %s with astrodrizzle Version %s\n' % (time_str,_infile,_pyver)
            print(_trlmsg)

            # Write out trailer comments to trailer file...
            ftmp = open(_tmptrl,'w')
            ftmp.writelines(_trlmsg)
            ftmp.close()
            _appendTrlFile(_trlfile,_tmptrl)

            _pyd_err = _trlroot+'_pydriz.stderr'

            try:
                b = drizzlepac.astrodrizzle.AstroDrizzle(input=_infile,runfile=_drizfile,
                                            configobj='defaults',in_memory=inmemory,
                                            num_cores=num_cores, **pipeline_pars)
            except Exception as errorobj:
                _appendTrlFile(_trlfile,_drizlog)
                _appendTrlFile(_trlfile,_pyd_err)
                _ftrl = open(_trlfile,'a')
                _ftrl.write('ERROR: Could not complete astrodrizzle processing of %s.\n' % _infile)
                _ftrl.write(str(sys.exc_info()[0])+': ')
                _ftrl.writelines(str(errorobj))
                _ftrl.write('\n')
                _ftrl.close()
                print('ERROR: Could not complete astrodrizzle processing of %s.' % _infile)
                raise Exception(str(errorobj))

            # Now, append comments created by PyDrizzle to CALXXX trailer file
            print('Updating trailer file %s with astrodrizzle comments.' % _trlfile)
            _appendTrlFile(_trlfile,_drizlog)

        # Update the calibration switch in the primary header of the
        # calibrated product to show that drizzling is complete.
        _fimg = fits.open(_cal_prodname, mode='update', memmap=False)
        _fimg['PRIMARY'].header[dkey] = 'COMPLETE'
        _fimg.close()
        del _fimg

        # Enforce pipeline convention of all lower-case product
        # names
        _prodlist = glob.glob('*drz.fits')
        for _prodname in _prodlist:
            _plower = _prodname.lower()
            if _prodname != _plower:  os.rename(_prodname,_plower)

    else:
        # Create default trailer file messages when astrodrizzle is not
        # run on a file.  This will typically apply only to BIAS, DARK,
        # and other reference images.
        # Start by building up the message...
        _trlmsg = _timestamp('astrodrizzle skipped ')
        _trlmsg += __trlmarker__
        _trlmsg += '%s: astrodrizzle processing not requested for %s.\n' % (time_str,inFilename)
        _trlmsg += '       astrodrizzle will not be run at this time.\n'
        print(_trlmsg)

        # Write message out to temp file and append it to full trailer file
        ftmp = open(_tmptrl,'w')
        ftmp.writelines(_trlmsg)
        ftmp.close()
        _appendTrlFile(_trlfile,_tmptrl)

    # Append final timestamp to trailer file...
    _final_msg = '%s: Finished processing %s \n' % (time_str,inFilename)
    _final_msg += _timestamp('astrodrizzle completed ')
    # Write out only the final message; _trlmsg was already appended above.
    ftmp = open(_tmptrl,'w')
    ftmp.writelines(_final_msg)
    ftmp.close()
    _appendTrlFile(_trlfile,_tmptrl)

    # If we created a new ASN table, we need to remove it
    if _new_asn is not None:
        for _name in _new_asn: fileutil.removeFile(_name)

    # Clean up any generated OrIg_files directory
    if os.path.exists("OrIg_files"):
        # check to see whether this directory is empty
        flist = glob.glob('OrIg_files/*.fits')
        if len(flist) == 0:
            os.rmdir("OrIg_files")
        else:
            print('OrIg_files directory NOT removed as it still contained images...')
    if headerlets:
        # Generate headerlets for each updated FLT image
        hlet_msg = _timestamp("Writing Headerlets started")
        for fname in _calfiles:
            frootname = fileutil.buildNewRootname(fname)
            hname = "%s_flt_hlet.fits"%frootname
            hlet_msg += "Created Headerlet file %s \n"%hname
            try:
                headerlet.write_headerlet(fname,'OPUS',output='flt', wcskey='PRIMARY',
                    author="OPUS",descrip="Default WCS from Pipeline Calibration",
                    attach=False,clobber=True,logging=False)
            except ValueError:
                hlet_msg += _timestamp("SKIPPED: Headerlet not created for %s \n"%fname)
                # update trailer file to log creation of headerlet files
        hlet_msg += _timestamp("Writing Headerlets completed")
        ftrl = open(_trlfile,'a')
        ftrl.write(hlet_msg)
        ftrl.close()

    # If processing was done in a temp working dir, restore results to original
    # processing directory, return to original working dir and remove temp dir
    if newpath:
        _restoreResults(new_processing_dir,orig_processing_dir)
        os.chdir(orig_processing_dir)
        _removeWorkingDir(new_processing_dir)

    # Provide feedback to user
    print(_final_msg)
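
Note: the pipeline examples in this section lean on a small helper, `_appendTrlFile`, that is not shown in this listing. Judging from how it is called (a temporary file is written, appended to the trailer, then reused), and from the comment below noting that appending the align log "will delete the _alignlog", it appears to append the contents of a temporary file to the trailer file and then remove the temporary file. A minimal sketch under that assumption (the real drizzlepac helper may differ in details):

import os

def _appendTrlFile(trlfile, tmpfile):
    """Append the contents of tmpfile to trlfile, then remove tmpfile.

    Hypothetical reconstruction based on usage in these examples.
    """
    if not os.path.exists(tmpfile):
        return
    # Read the messages generated for this step...
    with open(tmpfile) as ftmp:
        lines = ftmp.readlines()
    # ...append them to the full trailer file...
    with open(trlfile, 'a') as ftrl:
        ftrl.writelines(lines)
    # ...and remove the temporary file so it can be rewritten cleanly
    # (and is not appended twice).
    os.remove(tmpfile)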
Example No. 13
0
def process(inFile,force=False,newpath=None, inmemory=False, num_cores=None,
            headerlets=True, align_to_gaia=True):
    """ Run astrodrizzle on input file/ASN table
        using default values for astrodrizzle parameters.
    """
    # We only need to import this package if a user runs the task
    import drizzlepac
    from drizzlepac import processInput # used for creating new ASNs for _flc inputs
    from stwcs import updatewcs
    from drizzlepac import alignimages
    
    # interpret envvar variable, if specified
    if envvar_compute_name in os.environ:
        val = os.environ[envvar_compute_name].lower()
        if val not in envvar_bool_dict:
            msg = "ERROR: invalid value for {}.".format(envvar_compute_name)
            msg += "  \n    Valid Values: on, off, yes, no, true, false"
            raise ValueError(msg)            
        align_to_gaia = envvar_bool_dict[val]
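    # (For illustration only: envvar_bool_dict is assumed to be a
    #  module-level mapping along the lines of
    #      {'on': True, 'yes': True, 'true': True,
    #       'off': False, 'no': False, 'false': False}
    #  matching the "Valid Values" listed in the error message above;
    #  a similar shape is assumed for envvar_dict below, but with
    #  string values suitable for setting an environment variable.)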

    if envvar_new_apriori_name in os.environ:
        # Reset ASTROMETRY_STEP_CONTROL based on this variable
        # This provides backward-compatibility until ASTROMETRY_STEP_CONTROL
        # gets removed entirely.
        val = os.environ[envvar_new_apriori_name].lower()
        if val not in envvar_dict:
            msg = "ERROR: invalid value for {}.".format(envvar_new_apriori_name)
            msg += "  \n    Valid Values: on, off, yes, no, true, false"
            raise ValueError(msg)

        os.environ[envvar_old_apriori_name] = envvar_dict[val]

    if headerlets or align_to_gaia:
        from stwcs.wcsutil import headerlet

    # Open the input file
    try:
        # Make sure given filename is complete and exists...
        inFilename = fileutil.buildRootname(inFile,ext=['.fits'])
        if not os.path.exists(inFilename):
            print("ERROR: Input file - %s - does not exist." % inFilename)
            return
    except TypeError:
        print("ERROR: Inappropriate input file.")
        return

    # If newpath was specified, move all files to that directory for processing
    if newpath:
        orig_processing_dir = os.getcwd()
        new_processing_dir = _createWorkingDir(newpath,inFilename)
        _copyToNewWorkingDir(new_processing_dir,inFilename)
        os.chdir(new_processing_dir)

    # Initialize for later use...
    _mname = None
    _new_asn = None
    _calfiles = []

    # Identify WFPC2 inputs, which require special handling
    wfpc2_input = fits.getval(inFilename, 'instrume') == 'WFPC2'
    cal_ext = None

    # Check input file to see if [DRIZ/DITH]CORR is set to PERFORM
    if '_asn' in inFilename:
        # We are working with an ASN table.
        # Use asnutil code to extract filename
        inFilename = _lowerAsn(inFilename)
        _new_asn = [inFilename]
        _asndict = asnutil.readASNTable(inFilename,None,prodonly=False)
        _cal_prodname = _asndict['output'].lower()

        # Retrieve the first member's rootname for possible use later
        _fimg = fits.open(inFilename, memmap=False)
        for name in _fimg[1].data.field('MEMNAME'):
            if name[-1] != '*':
                _mname = name.split('\0', 1)[0].lower()
                break
        _fimg.close()
        del _fimg

    else:
        # Check to see if input is a _RAW file
        # If it is, strip off the _raw.fits extension...
        _indx = inFilename.find('_raw')
        if _indx < 0: _indx = len(inFilename)
        # ... and build the CALXXX product rootname.
        if wfpc2_input:
            # Force the code to use the _c0m file as the calibrated product
            cal_ext = ['_c0m.fits']
        _mname = fileutil.buildRootname(inFilename[:_indx], ext=cal_ext)

        _cal_prodname = inFilename[:_indx]
        # Reset inFilename to correspond to appropriate input for
        # drizzle: calibrated product name.
        inFilename = _mname

        if _mname is None:
            errorMsg = 'Could not find calibrated product!'
            raise Exception(errorMsg)

    # Create trailer filenames based on ASN output filename or
    # on input name for single exposures
    if '_raw' in inFile:
        # Output trailer file to RAW file's trailer
        _trlroot = inFile[:inFile.find('_raw')]
    elif '_asn' in inFile:
        # Output trailer file to ASN file's trailer, not product's trailer
        _trlroot = inFile[:inFile.find('_asn')]
    else:
        # Default: trim off the last suffix of the input filename
        # and replace it with '.tra'
        _indx = inFile.rfind('_')
        if _indx > 0:
            _trlroot = inFile[:_indx]
        else:
            _trlroot = inFile

    _trlfile = _trlroot + '.tra'

    # Interrogate the input file to find out whether the DRIZCORR
    # calibration switch has been set to PERFORM.
    dkey = 'DRIZCORR'
    # Unless, that is, the user wants to process again regardless of the
    # DRIZCORR keyword value...
    if force:
        dcorr = 'PERFORM'
    else:
        if _mname :
            _fimg = fits.open(fileutil.buildRootname(_mname,ext=['_raw.fits']), memmap=False)
            _phdr = _fimg['PRIMARY'].header
            if dkey in _phdr:
                dcorr = _phdr[dkey]
            else:
                dcorr = None
            _fimg.close()
            del _fimg
        else:
            dcorr = None

    time_str = _getTime()
    _tmptrl = _trlroot + '_tmp.tra'
    _drizfile = _trlroot + '_pydriz'
    _drizlog = _drizfile + ".log" # the '.log' gets added automatically by astrodrizzle
    _alignlog = _trlroot + '_align.log'
    if dcorr == 'PERFORM':
        if '_asn.fits' not in inFilename:
            # Working with a singleton.
            # However, we always want to use a calibrated product
            # as input, if one is available.
            _infile = fileutil.buildRootname(_cal_prodname, ext=cal_ext)
            _infile_flc = fileutil.buildRootname(_cal_prodname,ext=['_flc.fits'])

            _cal_prodname = _infile
            _inlist = _calfiles = [_infile]

            # Add CTE corrected filename as additional input if present
            if os.path.exists(_infile_flc) and _infile_flc != _infile:
                _inlist.append(_infile_flc)

        else:
            # Working with an ASN table...
            _infile = inFilename
            flist,duplist = processInput.checkForDuplicateInputs(_asndict['order'])
            _calfiles = flist
            if len(duplist) > 0:
                origasn = processInput.changeSuffixinASN(inFilename,'flt')
                dupasn = processInput.changeSuffixinASN(inFilename,'flc')
                _inlist = [origasn,dupasn]
            else:
                _inlist = [_infile]
            # We want to keep the original specification of the calibration
            # product name, though, not a lower-case version...
            _cal_prodname = inFilename
            _new_asn.extend(_inlist) # kept so we can delete it when finished

        # check to see whether FLC files are also present, and need to be updated
        # generate list of FLC files
        align_files = None
        _calfiles_flc = [f.replace('_flt.fits','_flc.fits') for f in _calfiles]
        # Ensure these files exist; if not, blank them out
        # Also pick out what files will be used for additional alignment to GAIA
        if not os.path.exists(_calfiles_flc[0]):
            _calfiles_flc = None
            align_files = _calfiles
            align_update_files = None
        else:
            align_files = _calfiles_flc
            align_update_files = _calfiles
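        # (Illustration of the two cases above: when FLC files exist, the
        #  GAIA alignment runs on the FLC copies and the updated WCS gets
        #  copied back to the matching FLT files later via headerlets;
        #  when no FLC files exist, the FLT files are aligned directly.)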

        # Run updatewcs on each list of images
        updatewcs.updatewcs(_calfiles)
        if _calfiles_flc:
            updatewcs.updatewcs(_calfiles_flc)

        if align_to_gaia:
            # Perform additional alignment on the FLC files, if present
            ###############
            #
            # call hlapipeline code here on align_files list of files
            #
            ###############
            # Create trailer marker message for start of align_to_GAIA processing
            _trlmsg = _timestamp("Align_to_GAIA started ")
            print(_trlmsg)
            ftmp = open(_tmptrl,'w')
            ftmp.writelines(_trlmsg)
            ftmp.close()
            _appendTrlFile(_trlfile,_tmptrl)
            _trlmsg = ""

            # perform_align returns an astropy Table summarizing the fit
            # status and headerlet file for each aligned image.
            try:
                align_table = alignimages.perform_align(align_files,update_hdr_wcs=True, runfile=_alignlog)
                for row in align_table:
                    if row['status'] == 0:
                        trlstr = "Successfully aligned {} to {} astrometric frame\n"
                        _trlmsg += trlstr.format(row['imageName'], row['catalog'])
                    else:
                        trlstr = "Could not align {} to absolute astrometric frame\n"
                        _trlmsg += trlstr.format(row['imageName'])

            except Exception:
                # Something went wrong with alignment to GAIA, so report this
                # in the trailer file; set align_table to None so the checks
                # below can still run.
                align_table = None
                _trlmsg = "EXCEPTION encountered in alignimages...\n"
                _trlmsg += "   No correction to absolute astrometric frame applied!\n"

            # Write the perform_align log to the trailer file...(this will delete the _alignlog)
            _appendTrlFile(_trlfile,_alignlog)

            # Append messages from this calling routine post-perform_align
            ftmp = open(_tmptrl,'w')
            ftmp.writelines(_trlmsg)
            ftmp.close()
            _appendTrlFile(_trlfile,_tmptrl)
            _trlmsg = ""

            # Check to see whether there are any additional input files
            # (namely, FLT images) that need to be updated with the new WCS
            if align_update_files and align_table:
                # Apply headerlets from alignment to FLT version of the files
                for fltfile, flcfile in zip(align_update_files, align_files):
                    row = align_table[align_table['imageName']==flcfile]
                    headerletFile = row['headerletFile'][0]
                    if headerletFile != "None":
                        headerlet.apply_headerlet_as_primary(fltfile, headerletFile,
                                                            attach=True, archive=True)
                        # append log file contents to _trlmsg for inclusion in trailer file
                        _trlstr = "Applying headerlet {} as Primary WCS to {}\n"
                        _trlmsg += _trlstr.format(headerletFile, fltfile)
                    else:
                        _trlmsg += "No absolute astrometric headerlet applied to {}\n".format(fltfile)

            # Finally, append any further messages associated with alignment from this calling routine
            _trlmsg += _timestamp('Align_to_GAIA completed ')
            print(_trlmsg)
            ftmp = open(_tmptrl,'w')
            ftmp.writelines(_trlmsg)
            ftmp.close()
            _appendTrlFile(_trlfile,_tmptrl)

        # Run astrodrizzle and send its processing statements to _trlfile
        _pyver = drizzlepac.astrodrizzle.__version__

        for _infile in _inlist: # Run astrodrizzle for all inputs
            # Create trailer marker message for start of astrodrizzle processing
            _trlmsg = _timestamp('astrodrizzle started ')
            _trlmsg += __trlmarker__
            _trlmsg += '%s: Processing %s with astrodrizzle Version %s\n' % (time_str,_infile,_pyver)
            print(_trlmsg)

            # Write out trailer comments to trailer file...
            ftmp = open(_tmptrl,'w')
            ftmp.writelines(_trlmsg)
            ftmp.close()
            _appendTrlFile(_trlfile,_tmptrl)

            _pyd_err = _trlroot+'_pydriz.stderr'

            try:
                drizzlepac.astrodrizzle.AstroDrizzle(input=_infile, runfile=_drizfile,
                                        configobj='defaults', in_memory=inmemory,
                                        num_cores=num_cores, **pipeline_pars)
            except Exception as errorobj:
                _appendTrlFile(_trlfile,_drizlog)
                _appendTrlFile(_trlfile,_pyd_err)
                _ftrl = open(_trlfile,'a')
                _ftrl.write('ERROR: Could not complete astrodrizzle processing of %s.\n' % _infile)
                _ftrl.write(str(sys.exc_info()[0])+': ')
                _ftrl.writelines(str(errorobj))
                _ftrl.write('\n')
                _ftrl.close()
                print('ERROR: Could not complete astrodrizzle processing of %s.' % _infile)
                raise Exception(str(errorobj))

            # Now, append comments created by PyDrizzle to CALXXX trailer file
            print('Updating trailer file %s with astrodrizzle comments.' % _trlfile)
            _appendTrlFile(_trlfile,_drizlog)

        # Update the calibration switch in the primary header of the
        # calibrated product to show that drizzling is complete.
        _fimg = fits.open(_cal_prodname, mode='update', memmap=False)
        _fimg['PRIMARY'].header[dkey] = 'COMPLETE'
        _fimg.close()
        del _fimg

        # Enforce pipeline convention of all lower-case product
        # names
        _prodlist = glob.glob('*drz.fits')
        for _prodname in _prodlist:
            _plower = _prodname.lower()
            if _prodname != _plower:  os.rename(_prodname,_plower)

    else:
        # Create default trailer file messages when astrodrizzle is not
        # run on a file.  This will typically apply only to BIAS, DARK,
        # and other reference images.
        # Start by building up the message...
        _trlmsg = _timestamp('astrodrizzle skipped ')
        _trlmsg += __trlmarker__
        _trlmsg += '%s: astrodrizzle processing not requested for %s.\n' % (time_str,inFilename)
        _trlmsg += '       astrodrizzle will not be run at this time.\n'
        print(_trlmsg)

        # Write message out to temp file and append it to full trailer file
        ftmp = open(_tmptrl,'w')
        ftmp.writelines(_trlmsg)
        ftmp.close()
        _appendTrlFile(_trlfile,_tmptrl)

    # Append final timestamp to trailer file...
    _final_msg = '%s: Finished processing %s \n' % (time_str,inFilename)
    _final_msg += _timestamp('astrodrizzle completed ')
    # Write out only the final message; _trlmsg was already appended above.
    ftmp = open(_tmptrl,'w')
    ftmp.writelines(_final_msg)
    ftmp.close()
    _appendTrlFile(_trlfile,_tmptrl)

    # If we created a new ASN table, we need to remove it
    if _new_asn is not None:
        for _name in _new_asn: fileutil.removeFile(_name)

    # Clean up any generated OrIg_files directory
    if os.path.exists("OrIg_files"):
        # check to see whether this directory is empty
        flist = glob.glob('OrIg_files/*.fits')
        if len(flist) == 0:
            os.rmdir("OrIg_files")
        else:
            print('OrIg_files directory NOT removed as it still contained images...')

    # If headerlets have already been written out by alignment code,
    # do NOT write out this version of the headerlets
    if headerlets:
        # Generate headerlets for each updated FLT image
        hlet_msg = _timestamp("Writing Headerlets started")
        for fname in _calfiles:
            frootname = fileutil.buildNewRootname(fname)
            hname = "%s_flt_hlet.fits"%frootname
            # Write out the headerlet file used by astrodrizzle, but do not
            # overwrite one already written out by alignimages
            if not os.path.exists(hname):
                hlet_msg += "Created Headerlet file %s \n"%hname
                try:
                    headerlet.write_headerlet(fname,'OPUS',output='flt', wcskey='PRIMARY',
                        author="OPUS",descrip="Default WCS from Pipeline Calibration",
                        attach=False,clobber=True,logging=False)
                except ValueError:
                    hlet_msg += _timestamp("SKIPPED: Headerlet not created for %s \n"%fname)
                    # update trailer file to log creation of headerlet files
        hlet_msg += _timestamp("Writing Headerlets completed")
        ftrl = open(_trlfile,'a')
        ftrl.write(hlet_msg)
        ftrl.close()

    # If processing was done in a temp working dir, restore results to original
    # processing directory, return to original working dir and remove temp dir
    if newpath:
        _restoreResults(new_processing_dir,orig_processing_dir)
        os.chdir(orig_processing_dir)
        _removeWorkingDir(new_processing_dir)

    # Provide feedback to user
    print(_final_msg)
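
For orientation, a hedged usage sketch for the `process()` function above; the dataset rootnames are placeholders, and only parameters from the signature shown are used:

# Run the full drizzle step on a single exposure, forcing processing
# even if DRIZCORR is not set to PERFORM in the header:
process('j8cw03f6q_raw.fits', force=True)

# Run on an association table, skipping the GAIA alignment step and
# doing all work in a separate processing directory:
process('j8cw03010_asn.fits', align_to_gaia=False, newpath='./tmp_proc')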
Example No. 14
0
def writeAsnDict(asndict, output=None):
    """
    writeAsnDict:
    =============
    Write out a new ASN table using a dictionary in memory.
    The input dictionary should be read in using 'readAsnTable',
    and can be modified as needed before writing out the new table.

    SYNTAX:
        buildasn.writeAsnDict(asndict,output=None)

    PARAMETERS:
            asndict:    dictionary from 'readAsnTable'
            output:     rootname or filename for output ASN file
        If output is None (default), the product name from asndict will
        be used to define the output filename for the table.
    """

    # Convert the ASN dictionary from 'readAsnTable'
    # into a format usable by 'buildasn' functions.
    tbldict = makeTableDict(asndict)

    # Extract info from table necessary for writing it out
    if output is None:
        outfile = asndict['output'] + '_asn.fits'
    else:
        if output.find('_asn.fits') < 0:
            outfile = output + '_asn.fits'
        else:
            outfile = output

    # Delete the file if it exists.
    if os.path.exists(outfile):
        warningmsg = "\n#########################################\n"
        warningmsg += "#                                       #\n"
        warningmsg += "# WARNING:                              #\n"
        warningmsg += "#  The exisiting assocation table,      #\n"
        warningmsg += "           " + str(outfile) + '\n'
        warningmsg += "#  is being replaced by buildasn.       #\n"
        warningmsg += "#                                       #\n"
        warningmsg += "#########################################\n\n"
        print(warningmsg)
        os.remove(outfile)

    mem0name = asndict['order'][0]
    refimg = asndict['members'][mem0name]['refimage']
    shframe = tbldict['frame']

    # Input image to be used as template
    fname = buildRootname(mem0name)

    # Open output ASN table file
    fasn = pyfits.HDUList()

    # Build ASN file primary header
    _buildAsnPrimary(fasn, outfile, fname, frame=shframe, refimage=refimg)

    # Create an extension HDU which contains a table
    exthdu = _makeTableHDU(tbldict)

    fasn.append(exthdu)

    # Close ASN table file
    fasn.writeto(outfile)
    fasn.close()
    del fasn

    return outfile
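
A hedged usage sketch for `writeAsnDict()`, following the round trip the docstring describes (read with 'readAsnTable', edit the dictionary in memory, write a new table); the rootname and member name are placeholders:

# Read an existing association table into a dictionary...
asndict = readAsnTable('j8cw03010_asn.fits')

# ...drop one member from the association (hypothetical member name)...
asndict['order'].remove('j8cw03f6q')
del asndict['members']['j8cw03f6q']

# ...then write the edited association out under a new rootname.
newtab = writeAsnDict(asndict, output='j8cw03010_edit')
print('Wrote', newtab)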