示例#1
0
def fgmosLS(input_fnames, rawdir, inter_flags=inter_flags, nsrc=1, linelist=None, logfile="fgmosLS.log", verbose=False):
    """
    Run the GMOS longslit reduction for the files named in *input_fnames*.

    Parameters
    ----------
    input_fnames : dict
        Maps file roles to filenames; at least the key "flatfile" is read
        in the visible portion of this function.
    rawdir : str
        Directory holding the raw frames.
        # NOTE(review): unused here -- presumably consumed further on.
    inter_flags :
        Interactive-mode flags; defaults to the module-level ``inter_flags``.
    nsrc : int
        Number of sources to extract (default 1).
    linelist : str or None
        Optional arc line list.
    logfile : str
        Name of the IRAF log file.
    verbose : bool
        If True, emit extra output.
    """

    # Load GMOS package from Gemini IRAF
    iraf.gemini()
    iraf.gmos()

    # Build the processed-flatfield name: append "_flat.fits", replacing a
    # trailing ".fits" extension when the input name already carries one.
    # (``matchfits`` is a module-level regex object -- TODO confirm.)
    # Fixed: identity comparison with None uses "is", not "==".
    if matchfits.search(input_fnames["flatfile"]) is None:
        procflatfile = input_fnames["flatfile"] + "_flat.fits"
    else:
        procflatfile = re.sub(r"\.fits$", "_flat.fits", input_fnames["flatfile"])

    return
示例#2
0
def reduce_sci(sci_frames, outroot, skyfile, flatfile, dodark=False, 
               darkfile='Dark.fits'):

   """
   Runs pyraf task nireduce to reduce the science frames using the 
   skyflat frame generated from these same science frames (using the
   calib_1 function) and the domeflat generated using the make_flat function.
   """

   from pyraf import iraf
   iraf.gemini()
   iraf.niri()
   iraf.unlearn('nprepare')
   iraf.unlearn('nireduce')
   """
   Set up the file lists that will be used in the various tasks
   """

   npname  = '%s_nprep.list' % outroot
   outname = '%s_ff.fits' % outroot

   """
   Create the lists
   """

   print ""
   print "Generating list of files"
   print "---------------------------------"
   f_out  = open(outname,'w')
   for i in sci_frames:
      ofile  = 'ff%s.fits' % i
      f_out.write('%s\n' % ofile)
   f_out.close()

   if dodark:
      iraf.nireduce('@%s'%npname,outimages='@%s'%outname,fl_sky=True,
                    skyimage=skyfile,fl_flat=True,flatimage=flatfile,
                    fl_dark=True,darkimage=darkfile)
   else:
      iraf.nireduce('@%s'%npname,outimages='@%s'%outname,fl_sky=True,
                    skyimage=skyfile,fl_flat=True,flatimage=flatfile,
                    fl_dark=False)
示例#3
0
def reduce_sci(sci_frames, outroot, skyfile, flatfile, dodark=False, 
               darkfile='Dark.fits'):

   """
   Runs pyraf task nireduce to reduce the science frames using the 
   skyflat frame generated from these same science frames (using the
   calib_1 function) and the domeflat generated using the make_flat function.
   """

   from pyraf import iraf
   iraf.gemini()
   iraf.niri()
   iraf.unlearn('nprepare')
   iraf.unlearn('nireduce')
   """
   Set up the file lists that will be used in the various tasks
   """

   npname  = '%s_nprep.list' % outroot
   outname = '%s_ff.fits' % outroot

   """
   Create the lists
   """

   print ""
   print "Generating list of files"
   print "---------------------------------"
   f_out  = open(outname,'w')
   for i in sci_frames:
      ofile  = 'ff%s.fits' % i
      f_out.write('%s\n' % ofile)
   f_out.close()

   if dodark:
      iraf.nireduce('@%s'%npname,outimages='@%s'%outname,fl_sky=True,
                    skyimage=skyfile,fl_flat=True,flatimage=flatfile,
                    fl_dark=True,darkimage=darkfile)
   else:
      iraf.nireduce('@%s'%npname,outimages='@%s'%outname,fl_sky=True,
                    skyimage=skyfile,fl_flat=True,flatimage=flatfile,
                    fl_dark=False)
示例#4
0
    def reduce(self):
        """
        Prepare, reduce and mosaic the FITS file with IRAF's GMOS tasks
        (currently using IRAF, to be replaced by our own constructs).

        Runs gprepare -> gireduce (overscan + trim only) -> gmosaic and
        writes the mosaicked result to ``<work_dir>/reduced/red-<fname>``,
        removing any previous output of the same name first.

        Returns
        -------
        str
            Path of the mosaicked, reduced FITS file.
        """
        reduce_dir = os.path.join(self.fits.work_dir, 'reduced')
        if not os.path.exists(reduce_dir):
            os.mkdir(reduce_dir)

        reduce_fname = os.path.join(reduce_dir,
                                    'red-{0}'.format(self.fits.fname))

        if os.path.exists(reduce_fname):
            # Fixed: the original logged the literal string '{0}' because
            # .format() was never called; also replaced the unquoted shell
            # 'rm' with the portable os.remove.
            logger.warn('{0} exists - deleting'.format(reduce_fname))
            os.remove(reduce_fname)

        from pyraf import iraf
        # NamedTemporaryFile is deleted as soon as the object is collected;
        # only its (then free) name is kept for IRAF to create files at.
        # NOTE(review): this name-reservation pattern is race-prone on
        # shared temp directories -- confirm acceptable here.
        prepare_temp_fname = tempfile.NamedTemporaryFile().name
        reduce_temp_fname = tempfile.NamedTemporaryFile().name

        iraf.gemini()
        iraf.gmos()

        iraf.gprepare(self.fits.full_path,
                      rawpath='',
                      outimag=prepare_temp_fname)
        # Overscan-subtract and trim only; bias, dark, QE and flat
        # corrections are all explicitly disabled.
        iraf.gireduce(inimages=prepare_temp_fname,
                      outimag=reduce_temp_fname,
                      fl_over=True,
                      fl_trim=True,
                      fl_bias=False,
                      fl_dark=False,
                      fl_qeco=False,
                      fl_flat=False)

        iraf.gmosaic(inimages=reduce_temp_fname, outimages=reduce_fname)

        return reduce_fname
示例#5
0
# Module imports and one-time Gemini IRAF package loading.
# NOTE(review): sgmllib is imported twice (here and on the urllib line) and
# numpy is imported both plain and as np; left untouched since other parts
# of the file may rely on either name.
import os, logging
import sgmllib
import sys
import urllib, sgmllib
import re
import numpy
from pyraf import iraf

# Load the Gemini IRAF packages required by the NIFS reduction tasks below.
iraf.gemini()
iraf.nifs()
iraf.gnirs()
iraf.gemtools()
import numpy as np
import pylab as pl
from pyraf import iraffunctions
from astropy.io import fits
from nifsDefs import convertRAdec, datefmt, writeList

#--------------------------------------------------------------------#
#                                                                    #
#     FLUX CALIBRATION                                               #
#                                                                    #
#     This module contains all the functions needed to remove        #
#     H lines from the standard star and do the flux calibration.    #
#                                                                    #
#                                                                    #
#    COMMAND LINE OPTIONS                                            #
#    If you wish to skip this script enter -t in the command line    #
#    Specify a spectral type or temperature with -e                  #
#    Specify a magnitude with -f                                     #
#    Specify an H line fitting method with -l (default is vega)      #
def start():
    """
         nifsBaselineCalibration

         This module contains all the functions needed to reduce
         NIFS GENERAL BASELINE CALIBRATIONS

         INPUT FILES FOR EACH BASELINE CALIBRATION:

         Raw files:
           - Flat frames (lamps on)
           - Flat frames (lamps off)
           - Arc frames
           - Arc dark frames
           - Ronchi mask flat frames

         OUTPUT FILES:
         - Shift file. Eg: sCALFLAT.fits
         - Bad Pixel Mask. Eg: rgnCALFLAT_sflat_bmp.pl
         - Flat field. Eg: rgnCALFLAT_flat.fits
         - Reduced arc frame. Eg: wrgnARC.fits
         - Reduced ronchi mask. Eg: rgnRONCHI.fits
         - Reduced dark frame. Eg: rgnARCDARK.fits

    Args:
        # Loaded from runtimeData/config.cfg
        calDirList:      list of paths to calibrations. ['path/obj/date/Calibrations_grating']
        over (boolean):  overwrite old files. Default: False.
        start (int):     starting step of daycal reduction. Specified at command line with -a. Default: 1.
        stop (int):      stopping step of daycal reduction. Specified at command line with -z. Default: 6.
        debug (boolean): enable optional debugging pauses. Default: False.

    """

    # TODO(nat): stop using first frame from list as name for combined frames. Find better names and implement
    # them in pipeline and docs.
    # TODO(nat): Finish converting the print statements to logging.info() statements.

    # Store current working directory for later use.
    path = os.getcwd()

    # Set up the logging file.
    log = os.getcwd()+'/Nifty.log'

    logging.info('#################################################')
    logging.info('#                                               #')
    logging.info('# Start the NIFS Baseline Calibration Reduction #')
    logging.info('#                                               #')
    logging.info('#################################################')

    # Set up/prepare IRAF.
    iraf.gemini()
    iraf.nifs()
    iraf.gnirs()
    iraf.gemtools()

    # Reset to default parameters the used IRAF tasks.
    iraf.unlearn(iraf.gemini,iraf.gemtools,iraf.gnirs,iraf.nifs)

    # From http://bishop.astro.pomona.edu/Penprase/webdocuments/iraf/beg/beg-image.html:
    # Before doing anything involving image display the environment variable
    # stdimage must be set to the correct frame buffer size for the display
    # servers (as described in the dev$graphcap file under the section "STDIMAGE
    # devices") or to the correct image display device. The task GDEVICES is
    # helpful for determining this information for the display servers.
    iraf.set(stdimage='imt2048')

    # Prepare the IRAF package for NIFS.
    # NSHEADERS lists the header parameters used by the various tasks in the
    # NIFS package (excluding headers values which have values fixed by IRAF or
    # FITS conventions).
    iraf.nsheaders("nifs",logfile=log)

    # Set clobber to 'yes' for the script. This still does not make the gemini
    # tasks overwrite files, so:
    # YOU WILL LIKELY HAVE TO REMOVE FILES IF YOU RE_RUN THE SCRIPT.
    user_clobber=iraf.envget("clobber")
    iraf.reset(clobber='yes')

    # Load reduction parameters from ./config.cfg.
    # NOTE(review): ConfigObj is not imported in this module's visible import
    # block -- confirm it is brought into scope elsewhere.
    with open('./config.cfg') as config_file:
        options = ConfigObj(config_file, unrepr=True)
        calDirList = options['calibrationDirectoryList']
        over = options['over']
        start = options['rstart']
        stop = options['rstop']
        debug = options['debug']

    ################################################################################
    # Define Variables, Reduction Lists AND identify/run number of reduction steps #
    ################################################################################

    # Loop over the Calibrations directories and reduce the day calibrations in each one.
    for calpath in calDirList:
        os.chdir(calpath)
        pwdDir = os.getcwd()+"/"
        iraffunctions.chdir(pwdDir)

        # However, don't do the reduction for a Calibration_"grating" directory without associated telluric or science data.
        # Check that a "grating" directory exists at the same level as the Calibrations_"grating" directory.
        # If not, skip the reduction of calibrations in that Calibrations_grating directory.
        # "grating" should be the last letter of calpath.
        grating = calpath[-1]
        if not os.path.exists("../"+grating):

            print "\n##############################################################################"
            print ""
            print "  No grating directory (including science or telluric data) found for  "
            print "  ", calpath
            print "  Skipping reduction of calibrations in that directory."
            print ""
            print "##############################################################################\n"

            continue

        # Create lists of each type of calibration from textfiles in Calibrations directory.
        # NOTE(review): these file handles are never closed explicitly; they
        # are left to the garbage collector.
        flatlist = open('flatlist', "r").readlines()
        flatdarklist = open("flatdarklist", "r").readlines()
        arcdarklist = open("arcdarklist", "r").readlines()
        arclist = open("arclist", "r").readlines()
        ronchilist = open("ronchilist", "r").readlines()

        # Store the name of the first image of each calibration-type-list in
        # a variable for later use (Eg: calflat). This is because gemcombine will
        # merge a list of files (Eg: "n"+flatlist) and the output file will have the same
        # name as the first file in the list (Eg: calflat). These first file names are used
        # later in the pipeline.
        calflat = (flatlist[0].strip()).rstrip('.fits')
        flatdark = (flatdarklist[0].strip()).rstrip('.fits')
        arcdark = (arcdarklist[0].strip()).rstrip('.fits')
        arc = (arclist[0].strip()).rstrip('.fits')
        ronchiflat = (ronchilist[0].strip()).rstrip('.fits')

        # Check start and stop values for reduction steps. Ask user for a correction if
        # input is not valid.
        # The "stop > 4" clause re-prompts because there are only 4 steps here.
        valindex = start
        while valindex > stop  or valindex < 1 or stop > 4:
            print "\n#####################################################################"
            print "#####################################################################"
            print ""
            print "     WARNING in calibrate: invalid start/stop values of calibration "
            print "                           reduction steps."
            print ""
            print "#####################################################################"
            print "#####################################################################\n"

            valindex = int(raw_input("\nPlease enter a valid start value (1 to 4, default 1): "))
            stop = int(raw_input("\nPlease enter a valid stop value (1 to 4, default 4): "))

        # Print the current directory of calibrations being processed.
        print "\n#################################################################################"
        print "                                   "
        print "  Currently working on calibrations "
        print "  in ", calpath
        print "                                   "
        print "#################################################################################\n"


        # Run each requested step in order; valindex walks from start to stop.
        while valindex <= stop:

            #############################################################################
            ##  STEP 1: Determine the shift to the MDF (mask definition file)          ##
            ##          using nfprepare (nsoffset). Ie: locate the spectra.            ##
            ##  Output: First image in flatlist with "s" prefix.                       ##
            #############################################################################

            if valindex == 1:
                if debug:
                    a = raw_input("About to enter step 1: locate the spectrum.")
                getShift(calflat, over, log)
                print "\n###################################################################"
                print ""
                print "    STEP 1: Locate the Spectrum (Determine the shift to the MDF) - COMPLETED"
                print ""
                print "###################################################################\n"

            #############################################################################
            ##  STEP 2: Create Flat Field frame and BPM (Bad Pixel Mask)               ##
            ##  Output: Flat Field image with spatial and spectral information.        ##
            ##          First image in flatlist with  "rgn" prefix and "_flat" suffix. ##
            #############################################################################

            elif valindex == 2:
                if debug:
                    a = raw_input("About to enter step 2: flat field.")
                makeFlat(flatlist, flatdarklist, calflat, flatdark, over, log)
                print "\n###################################################################"
                print ""
                print "    STEP 2: Flat Field (Create Flat Field image and BPM image) - COMPLETED       "
                print ""
                print "###################################################################\n"

            ############################################################################
            ##  STEP 3: NFPREPARE and Combine arc darks.                              ##
            ##          NFPREPARE, Combine and flat field arcs.                       ##
            ##          Determine the wavelength solution and create the wavelength   ##
            ##          referenced arc.                                               ##
            ############################################################################

            elif valindex == 3:
                if debug:
                    a = raw_input("About to enter step 3: wavelength solution.")
                reduceArc(arclist, arc, arcdarklist, arcdark, log, over)
                wavecal(arc, log, over, path)
                print "\n###################################################################"
                print ""
                print "         STEP 3: Wavelength Solution (NFPREPARE and Combine arc darks.  "
                print "                 NFPREPARE, Combine and flat field arcs."
                print "                 Determine the wavelength solution and create the"
                print "                 wavelength referenced arc) - COMPLETED"
                print ""
                print "###################################################################\n"

            ######################################################################################
            ##  Step 4: Trace the spatial curvature and spectral distortion in the Ronchi flat. ##
            ######################################################################################

            elif valindex == 4:
                if debug:
                    a = raw_input("About to enter step 4: spatial distortion.")
                ronchi(ronchilist, ronchiflat, calflat, over, flatdark, log)
                print "\n###################################################################"
                print ""
                print "     Step 4: Spatial Distortion (Trace the spatial curvature and spectral distortion "
                print "             in the Ronchi flat) - COMPLETED"
                print ""
                print "###################################################################\n"

            else:
                print "\nERROR in nifs_baseline_calibration: step ", valindex, " is not valid.\n"
                raise SystemExit

            valindex += 1

        print "\n##############################################################################"
        print ""
        print "  COMPLETE - Calibration reductions completed for "
        print "  ", calpath
        print ""
        print "##############################################################################\n"


    # Return to directory script was begun from.
    os.chdir(path)
    return
示例#7
0
def _log_step_banner(message):
    """Log *message* framed by the pipeline's standard '#' banner lines."""
    logging.info(
        "\n##############################################################################"
    )
    logging.info("")
    logging.info(message)
    logging.info("")
    logging.info(
        "##############################################################################\n"
    )


def run():
    """
    Do a telluric correction.

    Reads ./config.cfg, then for every science directory runs the selected
    range [start, stop] of the nine telluric-correction steps on each raw
    science frame, logging the standard banner after each completed step.
    The nine previously copy-pasted banner blocks are factored into
    ``_log_step_banner``; output is unchanged.
    """
    # Store current working directory for later use.
    path = os.getcwd()

    # Set up the logging file.
    log = os.getcwd() + '/Nifty.log'

    logging.info('\n#################################################')
    logging.info('#                                               #')
    logging.info('#       Start the NIFS Telluric Correction      #')
    logging.info('#                                               #')
    logging.info('#################################################\n')

    # Load reduction parameters from ./config.cfg.
    with open('./config.cfg') as config_file:
        config = ConfigObj(config_file, unrepr=True)
        # Read general pipeline config.
        manualMode = config['manualMode']
        over = config['over']
        scienceDirectoryList = config['scienceDirectoryList']
        # Read telluric-correction specific config.
        telluricCorrectionConfig = config['telluricCorrectionConfig']
        start = telluricCorrectionConfig['telluricCorrectionStart']
        stop = telluricCorrectionConfig['telluricCorrectionStop']
        hLineMethod = telluricCorrectionConfig['hLineMethod']
        hLineInter = telluricCorrectionConfig['hLineInter']
        continuumInter = telluricCorrectionConfig['continuumInter']
        telluricInter = telluricCorrectionConfig['telluricInter']
        tempInter = telluricCorrectionConfig['tempInter']
        standardStarSpecTemperature = telluricCorrectionConfig[
            'standardStarSpecTemperature']
        standardStarMagnitude = telluricCorrectionConfig[
            'standardStarMagnitude']
        standardStarBand = telluricCorrectionConfig['standardStarBand']
        standardStarRA = telluricCorrectionConfig['standardStarRA']
        standardStarDec = telluricCorrectionConfig['standardStarDec']

    # TESTING IRAF
    iraf.gemini()
    #iraf.gemtools()
    #iraf.gnirs()
    #iraf.nifs()

    #iraf.unlearn(iraf.gemini,iraf.gemtools,iraf.gnirs,iraf.nifs)

    #iraf.set(stdimage='imt2048')
    #iraf.nsheaders("nifs", logfile=log)
    #user_clobber=iraf.envget("clobber")
    #iraf.reset(clobber='yes')

    for scienceDirectory in scienceDirectoryList:
        try:
            os.chdir(scienceDirectory + '/products_telluric_corrected')
        except OSError:
            logging.info(
                "\nWARNING: no products_telluric_corrected/ directory found. Skipping this telluric correction."
            )
            continue

        scienceFrameList = open("../scienceFrameList", "r").readlines()
        scienceFrameList = [frame.strip() for frame in scienceFrameList]

        # The grating letter (eg 'K') is the directory two levels above
        # products_telluric_corrected/, i.e. .../<date>/K/obs107/products...
        grating = os.path.basename(
            os.path.dirname(os.path.dirname(os.getcwd())))

        for rawFrame in scienceFrameList:

            valindex = start
            while valindex <= stop:
                if valindex == 1:
                    getStandardInfo(rawFrame, standardStarMagnitude,
                                    standardStarSpecTemperature,
                                    standardStarBand, standardStarRA,
                                    standardStarDec, log, over)
                elif valindex == 2:
                    hLineCorrection(rawFrame, grating, hLineInter, hLineMethod,
                                    tempInter, log, over)
                elif valindex == 3:
                    fitContinuum(rawFrame, grating, continuumInter, tempInter,
                                 log, over)
                elif valindex == 4:
                    divideByContinuum(rawFrame, log, over)
                elif valindex == 5:
                    get1dSpecFromCube(rawFrame, log, over)
                elif valindex == 6:
                    getShiftScale(rawFrame, telluricInter, log, over)
                elif valindex == 7:
                    # Shift and scale the telluric correction spectrum and
                    # continuum fit to the telluric correction spectrum.
                    shiftScaleSpec(rawFrame, "2_fit", "6_shiftedFit", log,
                                   over)
                    shiftScaleSpec(rawFrame, "3_chtel", "7_schtel", log, over)
                elif valindex == 8:
                    divideCubebyTel(rawFrame, log, over)
                elif valindex == 9:
                    copyToFluxCalDirectory(rawFrame, log, over)

                # Matches the original: a banner is logged only for the
                # recognised steps 1-9; other values pass through silently.
                if 1 <= valindex <= 9:
                    _log_step_banner("  STEP {0} - COMPLETED ".format(valindex))

                valindex += 1

        os.chdir(path)
示例#8
0
def reduce_stdstar(rawdir,
                   rundir,
                   caldir,
                   starobj,
                   stdstar,
                   flat,
                   arc,
                   twilight,
                   twilight_flat,
                   starimg,
                   bias,
                   overscan,
                   vardq,
                   lacos,
                   observatory,
                   apply_lacos,
                   lacos_xorder,
                   lacos_yorder,
                   lacos_objlim,
                   lacos_sigclip,
                   bpm,
                   instrument,
                   slits,
                   fl_gscrrej,
                   wltrim_frac=0.03,
                   sens_order=6,
                   sens_function='spline3',
                   apsum_radius=1):
    """
    Reduction pipeline for standard star.

    Parameters
    ----------
    rawdir: string
        Directory containing raw images.
    rundi: string
        Directory where processed files are saved.
    caldir: string
        Directory containing standard star calibration files.
    starobj: string
        Object keyword for the star image.
    stdstar: string
        Star name in calibration file.
    flat: list
        Names of the files containing flat field images.
    arc: list
        Arc images.
    twilight: list
        Twilight flat images.
    starimg: string
        Name of the file containing the image to be reduced.
    bias: list
        Bias images.

    """

    iraf.set(stdimage='imtgmos')

    iraf.task(lacos_spec=lacos)

    iraf.gemini()
    iraf.unlearn('gemini')

    iraf.gmos()
    iraf.unlearn('gmos')

    iraf.gemtools()
    iraf.unlearn('gemtools')

    iraf.gmos.logfile = 'logfile.log'
    iraf.gemtools.gloginit.logfile = 'logfile.log'
    iraf.gfextract.verbose = 'no'

    # set directories
    iraf.set(caldir=rawdir)  #
    iraf.set(rawdir=rawdir)  # raw files
    iraf.set(procdir=rundir)  # processed files

    # os.path.isfile('arquivo')

    iraf.cd('procdir')

    flat = flat.strip('.fits')
    twilight = twilight.strip('.fits')
    twilight_flat = twilight_flat.strip('.fits')
    arc = arc.strip('.fits')
    starimg = starimg.strip('.fits')
    mdffile = 'mdf' + flat + '.fits'

    iraf.gfreduce.bias = 'rawdir$' + bias
    iraf.gfreduce.fl_fulldq = 'yes'
    iraf.gfreduce.fl_fixgaps = 'yes'
    iraf.gireduce.bpm = 'rawdir$' + bpm

    cal_reduction(rawdir=rawdir,
                  rundir=rundir,
                  flat=flat,
                  arc=arc,
                  twilight=twilight,
                  bias=bias,
                  bpm=bpm,
                  overscan=overscan,
                  vardq=vardq,
                  instrument=instrument,
                  slits=slits,
                  twilight_flat=twilight_flat)
    #
    #   Actually reduce star
    #
    imageName = 'rg' + starimg + '.fits'
    if os.path.isfile(imageName):
        skipwarn(imageName)
    else:

        imageName = 'g' + starimg + '.fits'
        if os.path.isfile(imageName):
            iraf.printlog(
                'GIREDS: WARNING: Removing file {:s}'.format(imageName),
                'logfile.log', 'yes')
            iraf.delete(imageName)

        iraf.gfreduce(starimg,
                      slits='header',
                      rawpath='rawdir$',
                      fl_inter='no',
                      fl_addmdf='yes',
                      key_mdf='MDF',
                      mdffile=mdffile,
                      weights='no',
                      fl_over=overscan,
                      fl_trim='yes',
                      fl_bias='yes',
                      trace='no',
                      recenter='no',
                      fl_flux='no',
                      fl_gscrrej='no',
                      fl_extract='no',
                      fl_gsappwave='no',
                      fl_wavtran='no',
                      fl_novl='yes',
                      fl_skysub='no',
                      fl_vardq=vardq,
                      mdfdir='procdir$')
    prefix = 'rg'
    #
    # Gemfix
    #
    imageName = 'p' + prefix + starimg + '.fits'
    if os.path.isfile(imageName):
        skipwarn(imageName)
    else:
        iraf.gemfix(prefix + starimg,
                    out='p' + prefix + starimg,
                    method='fit1d',
                    bitmask=1,
                    axis=1)
    prefix = 'p' + prefix
    #
    # LA Cosmic
    #
    if apply_lacos:

        imageName = 'l' + prefix + starimg + '.fits'
        if os.path.isfile(imageName):
            skipwarn(imageName)
        else:
            if apply_lacos:
                iraf.gemcrspec(prefix + starimg,
                               out='l' + prefix + starimg,
                               sigfrac=0.32,
                               niter=4,
                               fl_vardq=vardq,
                               xorder=lacos_xorder,
                               yorder=lacos_yorder,
                               objlim=lacos_objlim,
                               sigclip=lacos_sigclip)

        prefix = 'l' + prefix
    #
    # Extraction and Gemini's comsmic ray rejection
    #
    if fl_gscrrej:
        imageName = 'x' + prefix + starimg + '.fits'

        if os.path.isfile(imageName):
            skipwarn(imageName)
            fl_gscrrej = False
        else:
            imageName = 'ex' + prefix + starimg + '.fits'
            if os.path.isfile(imageName):
                skipwarn(imageName)
            else:
                iraf.gfreduce(prefix + starimg,
                              slits='header',
                              rawpath='./',
                              fl_inter='no',
                              fl_addmdf='no',
                              key_mdf='MDF',
                              mdffile=mdffile,
                              fl_over='no',
                              fl_trim='no',
                              fl_bias='no',
                              trace='no',
                              recenter='no',
                              fl_flux='no',
                              fl_gscrrej=fl_gscrrej,
                              fl_extract='yes',
                              fl_gsappwave='yes',
                              fl_wavtran='no',
                              fl_novl='no',
                              fl_skysub='no',
                              reference='eprg' + flat,
                              weights='no',
                              wavtraname='eprg' + arc,
                              response='eprg' + flat + '_response.fits',
                              fl_vardq=vardq)
        prefix = 'ex' + prefix
    else:
        imageName = 'e' + prefix + starimg + '.fits'

        if os.path.isfile(imageName):
            skipwarn(imageName)
        else:
            iraf.gfreduce(prefix + starimg,
                          slits='header',
                          rawpath='./',
                          fl_inter='no',
                          fl_addmdf='no',
                          key_mdf='MDF',
                          mdffile=mdffile,
                          fl_over='no',
                          fl_trim='no',
                          fl_bias='no',
                          trace='no',
                          recenter='no',
                          fl_flux='no',
                          fl_gscrrej=fl_gscrrej,
                          fl_extract='yes',
                          fl_gsappwave='yes',
                          fl_wavtran='no',
                          fl_novl='no',
                          fl_skysub='no',
                          reference='eprg' + flat,
                          weights='no',
                          wavtraname='eprg' + arc,
                          response='eprg' + flat + '_response.fits',
                          fl_vardq=vardq)
        prefix = 'e' + prefix
    #
    # Wavelength transform
    #
    wl1, wl2 = wl_lims(prefix + starimg + '.fits', wltrim_frac)
    imageName = 't' + prefix + starimg + '.fits'
    if os.path.isfile(imageName):
        skipwarn(imageName)
    else:
        iraf.gfreduce(prefix + starimg,
                      slits='header',
                      rawpath='./',
                      fl_inter='no',
                      fl_addmdf='no',
                      key_mdf='MDF',
                      mdffile=mdffile,
                      fl_over='no',
                      fl_trim='no',
                      fl_bias='no',
                      trace='no',
                      recenter='no',
                      fl_flux='no',
                      fl_gscrrej='no',
                      fl_extract='no',
                      fl_gsappwave='no',
                      fl_wavtran='yes',
                      fl_novl='no',
                      fl_skysub='no',
                      reference='eprg' + flat,
                      weights='no',
                      wavtraname='eprg' + arc,
                      response='eprg' + flat + '_response.fits',
                      fl_vardq=vardq,
                      w1=wl1,
                      w2=wl2)
    prefix = 't' + prefix

    #
    # Sky subtraction
    #
    imageName = 's' + prefix + starimg + '.fits'
    if os.path.isfile(imageName):
        skipwarn(imageName)
    else:
        iraf.gfreduce(prefix + starimg,
                      slits='header',
                      rawpath='./',
                      fl_inter='no',
                      fl_addmdf='no',
                      key_mdf='MDF',
                      mdffile=mdffile,
                      fl_over='no',
                      fl_trim='no',
                      fl_bias='no',
                      trace='no',
                      recenter='no',
                      fl_flux='no',
                      fl_gscrrej='no',
                      fl_extract='no',
                      fl_gsappwave='no',
                      fl_wavtran='no',
                      fl_novl='no',
                      fl_skysub='yes',
                      reference='eprg' + flat,
                      weights='no',
                      wavtraname='eprg' + arc,
                      response='eprg' + flat + '_response.fits',
                      fl_vardq=vardq,
                      w1=wl1,
                      w2=wl2)
    prefix = 's' + prefix
    #
    #   Apsumming the stellar spectra
    #
    xinst = pf.getdata(prefix + starimg + '.fits', ext=1)['XINST']
    if instrument == 'GMOS-N':
        x0 = np.average(xinst[xinst < 10])
    elif instrument == 'GMOS-S':
        x0 = np.average(xinst[xinst > 10])

    ap_expression = '((XINST-{:.2f})**2 + '\
        '(YINST-2.45)**2)**0.5 < {:.2f}'.format(x0, apsum_radius)

    imageName = 'a' + prefix + starimg + '.fits'
    if os.path.isfile(imageName):
        skipwarn(imageName)
    else:
        iraf.gfapsum(prefix + starimg,
                     fl_inter='no',
                     lthreshold='INDEF',
                     hthreshold='INDEF',
                     reject='avsigclip',
                     expr=ap_expression)
    #
    #   Building sensibility function
    #
    if os.path.isfile('std' + starimg)\
            and os.path.isfile('sens' + starimg + '.fits'):
        skipwarn('std{0:s} and sens{0:s}.fits'.format(starimg))
    else:

        imageName = 'std' + starimg
        if os.path.isfile(imageName):
            iraf.printlog(
                'GIREDS: WARNING: Removing file {:s}'.format(imageName),
                'logfile.log', 'yes')
            iraf.delete(imageName)

        imageName = 'sens' + starimg + '.fits'
        if os.path.isfile(imageName):
            iraf.printlog(
                'GIREDS: WARNING: Removing file {:s}'.format(imageName),
                'logfile.log', 'yes')
            iraf.delete(imageName)

        iraf.gsstandard('a' + prefix + starimg,
                        starname=stdstar,
                        observatory=observatory,
                        sfile='std' + starimg,
                        sfunction='sens' + starimg,
                        caldir=caldir,
                        order=sens_order,
                        function=sens_function)
    #
    #   Apply flux calibration to star
    #
    imageName = 'c' + prefix + starimg + '.fits'
    if os.path.isfile(imageName):
        skipwarn(imageName)
    else:
        iraf.gscalibrate(prefix + starimg,
                         sfuncti='sens' + starimg,
                         extinct='onedstds$ctioextinct.dat',
                         observatory=observatory,
                         fluxsca=1,
                         fl_vardq=vardq)
    #
    #   Create data cubes
    #
    imageName = 'dc' + prefix + starimg + '.fits'
    if os.path.isfile(imageName):
        skipwarn(imageName)
    else:
        data_cube = CubeBuilder('c' + prefix + starimg + '.fits')
        data_cube.build_cube()
        data_cube.fit_refraction_function()
        data_cube.fix_atmospheric_refraction()
        data_cube.write(imageName)

    #
    # Test calibration
    #
    iraf.cd(caldir)
    caldata = np.loadtxt(stdstar + '.dat', unpack=True)
    iraf.cd('procdir')
    calflux = mag2flux(caldata[0], caldata[1])

    imageName = 'ca' + prefix + starimg + '.fits'
    if os.path.isfile(imageName):
        skipwarn(imageName)
    else:
        iraf.gscalibrate('a' + prefix + starimg,
                         sfuncti='sens' + starimg,
                         extinct='onedstds$ctioextinct.dat',
                         observatory=observatory,
                         fluxsca=1)

    sumflux = pf.getdata('ca' + prefix + starimg + '.fits', ext=2)
    sumhead = pf.getheader('ca' + prefix + starimg + '.fits', ext=2)
    sumwl = sumhead['crval1'] + np.arange(
        sumhead['naxis1']) * sumhead['cdelt1']

    plt.close('all')
    plt.plot(sumwl, sumflux, 'b', lw=.5)
    plt.plot(caldata[0], calflux, 'r', lw=1.5)
    plt.xlim(sumwl[0] * .99, sumwl[-1] * 1.01)
    plt.ylim(min(calflux) * .8, max(calflux) * 1.2)
    plt.savefig('calib' + starimg + '.eps')
示例#9
0
def reduce_stdstar(rawdir, rundir, caldir, starobj, stdstar, flat,
    arc, twilight, starimg, bias, overscan, vardq):
    """
    Reduction pipeline for standard star.

    Runs the full GMOS IFU reduction chain on a standard-star observation:
    flat/twilight/arc reduction, fiber response, wavelength calibration,
    cosmic-ray cleaning, extraction, sky subtraction, aperture summing and
    derivation of a sensitivity function with gsstandard.

    Parameters
    ----------
    rawdir: string
        Directory containing raw images.
    rundir: string
        Directory where processed files are saved.
    caldir: string
        Directory containing standard star calibration files.
    starobj: string
        Object keyword for the star image.  (Not referenced in the body
        below; kept for interface compatibility.)
    stdstar: string
        Star name in calibration file.
    flat: list
        Names of the files containing flat field images.
    arc: list
        Arc images.
    twilight: list
        Twilight flat images.
    starimg: string
        Name of the file containing the image to be reduced.
    bias: list
        Bias images.  Only the first entry is used.
    overscan: 'yes'/'no'
        Passed to gfreduce's fl_over parameter (overscan subtraction).
    vardq: 'yes'/'no'
        Passed as fl_vardq to the IRAF tasks (variance/DQ propagation).
    """

    iraf.set(stdimage='imtgmos')

    # Load the IRAF packages used below.
    iraf.gemini()
    iraf.gemtools()
    iraf.gmos()

    #iraf.unlearn('gemini')
    #iraf.unlearn('gmos')

    # Register the external LA Cosmic task.  NOTE(review): hard-coded
    # absolute path -- this will fail on machines without this file.
    iraf.task(lacos_spec='/storage/work/gemini_pairs/lacos_spec.cl')

    tstart = time.time()

    #set directories
    iraf.set(caldir=rawdir)  # calibration (bias) files; see gfreduce.bias below
    iraf.set(rawdir=rawdir)  # raw files
    iraf.set(procdir=rundir)  # processed files

    iraf.gmos.logfile='logfile.log'

    iraf.cd('procdir')

    # building lists

    def range_string(l):
        # Comma-separated list of the 4-character frame numbers taken from
        # characters [-9:-5] of each filename -- assumes Gemini-style names
        # ending in 'Snnnn.fits'; TODO confirm the naming convention.
        return (len(l)*'{:4s},').format(*[i[-9:-5] for i in l])

    iraf.gemlist(range=range_string(flat), root=flat[0][:-9],
        Stdout='flat.list')
    iraf.gemlist(range=range_string(arc), root=arc[0][:-9],
        Stdout='arc.list')
    #iraf.gemlist(range=range_string(star), root=star[0][:-4],
    #    Stdout='star.list')
    iraf.gemlist(range=range_string(twilight),
        root=twilight[0][:-9], Stdout='twilight.list')

    # Use the first bias frame for the whole reduction.
    iraf.gfreduce.bias = 'caldir$'+bias[0]

    #######################################################################
    #######################################################################
    ###   Star reduction                                                  #
    #######################################################################
    #######################################################################

    #
    #   Flat reduction
    #

    iraf.gfreduce(
        '@flat.list', slits='header', rawpath='rawdir$', fl_inter='no',
        fl_addmdf='yes', key_mdf='MDF', mdffile='default', weights='no',
        fl_over=overscan, fl_trim='yes', fl_bias='yes', trace='yes', t_order=4,
        fl_flux='no', fl_gscrrej='no', fl_extract='yes', fl_gsappwave='no',
        fl_wavtran='no', fl_novl='no', fl_skysub='no', reference='',
        recenter='yes', fl_vardq=vardq)

    # NOTE(review): reference='erg'+flat[0] keeps the '.fits' suffix here,
    # while the final gfreduce below uses 'erg'+flat[0][:-5]; confirm both
    # resolve to the same reference image.
    iraf.gfreduce('@twilight.list', slits='header', rawpath='rawdir$',
        fl_inter='no', fl_addmdf='yes', key_mdf='MDF',
        mdffile='default', weights='no',
        fl_over=overscan, fl_trim='yes', fl_bias='yes', trace='yes',
        recenter='no',
        fl_flux='no', fl_gscrrej='no', fl_extract='yes', fl_gsappwave='no',
        fl_wavtran='no', fl_novl='no', fl_skysub='no',
        reference='erg'+flat[0], fl_vardq=vardq)
    #
    #   Response function
    #


    for i, j in enumerate(flat):

        # Strip the '.fits' extension from the flat name.
        j = j[:-5]

        iraf.imdelete(j+'_response')
        # NOTE(review): the flat name is stripped of '.fits' (j[:-5]) but
        # twilight[i] is used as-is; if both lists hold full filenames the
        # skyimage name keeps its extension -- confirm this is intended.
        iraf.gfresponse('erg'+j+'.fits', out='erg'+j+'_response',
            skyimage='erg'+twilight[i], order=95, fl_inter='no',
            func='spline3',
            sample='*', verbose='yes')

    #   Arc reduction
    #

    iraf.gfreduce(
        '@arc.list', slits='header', rawpath='rawdir$', fl_inter='no',
        fl_addmdf='yes', key_mdf='MDF', mdffile='default', weights='no',
        fl_over=overscan, fl_trim='yes', fl_bias='yes', trace='no',
        recenter='no',
        fl_flux='no', fl_gscrrej='no', fl_extract='yes', fl_gsappwave='no',
        fl_wavtran='no', fl_novl='no', fl_skysub='no', reference='erg'+flat[0],
        fl_vardq=vardq)


    #   Finding wavelength solution
    #   Note: the automatic identification is very good
    #

    for i in arc:

        iraf.gswavelength('erg'+i, function='chebyshev', nsum=15, order=4,
            fl_inter='no', nlost=5, ntarget=20, aiddebug='s', threshold=5,
            section='middle line')

    #
    #   Apply wavelength solution to the lamp 2D spectra
    #
    # (the call below is still inside the 'for i in arc' loop despite the
    # dedented comment block above)

        iraf.gftransform('erg'+i, wavtran='erg'+i, outpref='t', fl_vardq=vardq)

    ##
    ##   Actually reduce star
    ##


    iraf.gfreduce(
        starimg, slits='header', rawpath='rawdir$', fl_inter='no',
        fl_addmdf='yes', key_mdf='MDF', mdffile='default', weights='no',
        fl_over=overscan, fl_trim='yes', fl_bias='yes', trace='no',
        recenter='no',
        fl_flux='no', fl_gscrrej='no', fl_extract='no', fl_gsappwave='no',
        fl_wavtran='no', fl_novl='yes', fl_skysub='no', fl_vardq=vardq)

    # LA Cosmic cleaning of the bias/overscan-reduced ('rg'-prefixed) image.
    iraf.gemcrspec('rg{:s}'.format(starimg), out='lrg'+starimg, sigfrac=0.32, 
         niter=4, fl_vardq=vardq)

    iraf.gfreduce(
        'lrg'+starimg, slits='header', rawpath='./', fl_inter='no',
        fl_addmdf='no', key_mdf='MDF', mdffile='default',
        fl_over='no', fl_trim='no', fl_bias='no', trace='no',
        recenter='no',
        fl_flux='no', fl_gscrrej='no', fl_extract='yes',
        fl_gsappwave='yes',
        fl_wavtran='yes', fl_novl='no', fl_skysub='yes',
        reference='erg'+flat[0][:-5], weights='no',
        wavtraname='erg'+arc[0][:-5],
        response='erg'+flat[0][:-5]+'_response.fits',
        fl_vardq=vardq)
    #
    #   Apsumming the stellar spectra
    #
    iraf.gfapsum(
        'stexlrg'+starimg, fl_inter='no', lthreshold=400.,
        reject='avsigclip')
    #
    #   Building sensibility function
    #


    iraf.gsstandard(
        ('astexlrg{:s}').format(starimg), starname=stdstar,
        observatory='Gemini-South', sfile='std', sfunction='sens',
        caldir=caldir)
    #
    #   Apply flux calibration to galaxy
    #
    #
    ##iraf.imdelete('*****@*****.**')
    #
    ##iraf.gscalibrate('*****@*****.**',sfunction='sens.fits',fl_ext='yes',extinct='onedstds$ctioextinct.dat',observatory='Gemini-South',fluxsca=1)
    #
    ##
    ##   Create data cubes
    ##
    #
    #
    ##for i in objs:
    ##  iraf.imdelete('d0.1cstexlrg'+i+'.fits')
    ##  iraf.gfcube('cstexlrg'+i+'.fits',outpref='d0.1',ssample=0.1,fl_atmd='yes',fl_flux='yes')
    #
    ##
    ## Combine cubes
    ##
    #
    #
    ##iraf.imdelete('am2306-721r4_wcsoffsets.fits')
    ##iraf.imcombine('d0.1cstexlrgS20141113S00??.fits[1]',output='am2306-721r4_wcsoffsets.fits',combine='average',reject='sigclip',masktype='badvalue',lsigma=2,hsigma=2,offset='wcs',outlimits='2 67 2 48 100 1795')
    #

    tend = time.time()

    print('Elapsed time in reduction: {:.2f}'.format(tend - tstart))
示例#10
0
def run():
    """
    Do a flux calibration.

    Reads pipeline parameters from ./config.cfg and, for every science
    directory, runs the configured range of flux-calibration steps
    (``fluxCalibrationStart`` .. ``fluxCalibrationStop``) on each raw
    science frame:

        1. divideByContinuum
        2. makeFLambda
        3. makeBlackBody
        4. makeBlackBodyScale
        5. scaleBlackBody
        6. multiplyByBlackBody

    Science directories lacking a products_fluxcal_AND_telluric_corrected/
    subdirectory are skipped with a warning.
    """
    # Remember where we started so we can return after each science
    # directory is processed.
    path = os.getcwd()
    # Path of the shared log file handed to the individual steps.
    log = os.getcwd() + '/Nifty.log'
    # Set up iraf
    iraf.gemini()
    #iraf.unlearn("gemini")

    #iraf.unlearn(iraf.gemini,iraf.gemtools,iraf.gnirs,iraf.nifs,iraf.imcopy)

    logging.info('\n#################################################')
    logging.info('#                                               #')
    logging.info('#       Start the NIFS Flux Calibration         #')
    logging.info('#                                               #')
    logging.info('#################################################\n')

    def _log_step_done(step):
        # Emit the standard banner marking the completion of one step.
        logging.info(
            "\n##############################################################################"
        )
        logging.info("")
        logging.info("  STEP %d - COMPLETED " % step)
        logging.info("")
        logging.info(
            "##############################################################################\n"
        )

    # Load reduction parameters from ./config.cfg.
    with open('./config.cfg') as config_file:
        config = ConfigObj(config_file, unrepr=True)
        # Read general pipeline config.  manualMode is read for parity with
        # the other pipeline stages; it is not used in this step.
        manualMode = config['manualMode']
        over = config['over']
        scienceDirectoryList = config['scienceDirectoryList']
        # Read flux-calibration specific config.
        fluxCalbrationConfig = config['fluxCalbrationConfig']
        start = fluxCalbrationConfig['fluxCalibrationStart']
        stop = fluxCalbrationConfig['fluxCalibrationStop']

    for scienceDirectory in scienceDirectoryList:
        try:
            os.chdir(scienceDirectory +
                     '/products_fluxcal_AND_telluric_corrected')
        except OSError:
            logging.info(
                "\nWARNING: no products_fluxcal_AND_telluric_corrected/ directory found. Skipping this telluric correction."
            )
            continue

        # BUGFIX: close the frame-list file deterministically (it was
        # previously opened without ever being closed).
        with open("../scienceFrameList", "r") as frame_file:
            scienceFrameList = [frame.strip() for frame in frame_file]

        # Derive the grating letter (e.g. 'K') from the directory layout
        # .../<target>/<date>/<grating>/<obs>/products_...: it is the third
        # path component from the end of the current directory.
        temp = os.path.split(os.getcwd())
        temp2 = os.path.split(temp[0])
        temp3 = os.path.split(temp2[0])
        grating = temp3[1]

        # Steps are numbered 1..6; the configured [start, stop] range
        # selects which of them run, in ascending order.
        for rawFrame in scienceFrameList:

            valindex = start
            while valindex <= stop:

                if valindex == 1:
                    divideByContinuum(rawFrame, log, over)
                    _log_step_done(1)

                if valindex == 2:
                    makeFLambda(rawFrame, grating, log, over)
                    _log_step_done(2)

                if valindex == 3:
                    makeBlackBody(rawFrame, grating, log, over)
                    _log_step_done(3)

                if valindex == 4:
                    makeBlackBodyScale(rawFrame, log, over)
                    _log_step_done(4)

                if valindex == 5:
                    scaleBlackBody(rawFrame, log, over)
                    _log_step_done(5)

                if valindex == 6:
                    multiplyByBlackBody(rawFrame, log, over)
                    _log_step_done(6)

                valindex += 1

        os.chdir(path)
示例#11
0
def make_flat(on_frames, off_frames, rawroot, band, rawdir='.'):
   """
   Makes an imaging flat-field file using the niflat task in pyraf
   """

   from pyraf import iraf
   iraf.gemini()
   iraf.niri()
   iraf.unlearn('nprepare')
   iraf.unlearn('niflat')

   """
   Set up the file lists that will be used in the various tasks
   """

   rawname = 'flats_%s_all_raw.list' % band
   npname  = 'flats_%s_all_nprepare.list' % band
   onname  = 'flats_%s_on.list' % band
   offname = 'flats_%s_off.list' % band
   outflat = 'Flat_%s.fits' % band

   """
   Create the lists
   """

   print ""
   print "Generating list of lamps-on files"
   print "---------------------------------"
   all_list = []
   f_in  = open(rawname,'w')
   f_np  = open(npname,'w')
   f_on  = open(onname,'w')
   f_off = open(offname,'w')
   for i in on_frames:
      infile = '%s%s.fits' % (rawroot,i)
      ofile  = 'np%s.fits' % i
      print " %s" % infile
      f_in.write('%s\n' % infile)
      f_np.write('%s\n' % ofile)
      f_on.write('%s\n' % ofile)
      all_list.append(ofile)
   print ""
   print "Generating list of lamps-off files"
   print "----------------------------------"
   for i in off_frames:
      infile = '%s%s.fits' % (rawroot,i)
      ofile  = 'np%s.fits' % i
      print " %s" % infile
      f_in.write('%s\n' % infile)
      f_np.write('%s\n' % ofile)
      f_off.write('%s\n' % ofile)
      all_list.append(ofile)
   f_in.close()
   f_np.close()
   f_on.close()
   f_off.close()

   """ 
   Run pyraf task nprepare to convert format of raw files to the one that is
   expected for subsequent tasks.
   """
   print ""
   print "Running nprepare on raw flat frames"
   print ""
   iraf.nprepare('@%s' %rawname,rawpath=rawdir,outimages='@%s'%npname)

   """
   Run pyraf task niflat to actually make the flat-field file
   """
   iraf.niflat('@%s'%onname,flatfile=outflat,lampsoff='@%s'%offname)


   """ Clean up """
   for i in all_list:
      os.remove(i)

   """ Give some information about the output file """
   print ""
   print "New flatfield file summary"
   print "------------------------------------------------------------------"
   flathdu = pf.open(outflat)
   flathdu.info()
   flathdu.close()
示例#12
0
def calib_1(sci_frames, rawroot, outroot, bpmfile, rawdir='.', obsdate=None, 
            do_nresid=False):
   """
   Does the non-linearity correction and then converts the linearized
   input files to the format expected by the Gemini pyraf tasks.
   These steps are done through one Gemini and three pyraf tasks:
      nirlin    - to correct the raw files for nonlinearity
      nprepare  - to convert the linearized files to the appropriate format
      nresidual - [NOT IMPLEMENTED YET] to deal with persistence, if desired
      nisky     - to create the first-pass sky image

   Parameters:
      sci_frames - sequence of frame ids; each is appended to rawroot to
                   form a raw filename '<rawroot><frame>.fits'
      rawroot    - common prefix of the raw filenames
      outroot    - root used to name the list files and the output sky image
      bpmfile    - bad-pixel-mask file passed to nprepare
      rawdir     - directory holding the raw files (default '.')
      obsdate    - if given, output names are tagged '<obsdate>_<frame>'
      do_nresid  - accepted but currently unused (the nresidual persistence
                   step is not implemented yet)
   """

   from pyraf import iraf
   iraf.gemini()
   iraf.niri()
   iraf.unlearn('nprepare')
   iraf.unlearn('nisky')
   import nirlin

   """
   Set up the file lists that will be used in the various tasks
   """

   rawname    = '%s_raw.list' % outroot
   lincorname = '%s_lincor.list' % outroot
   npname     = '%s_nprep.list' % outroot
   outname    = '%s_sky.fits' % outroot

   """
   Create the lists
   """

   print ""
   print "Generating list of science files"
   print "---------------------------------"
   raw_list = []
   lc_list = []
   #np_list = []
   f_in  = open(rawname,'w')
   f_lc  = open(lincorname,'w')
   f_np  = open(npname,'w')
   for i in sci_frames:
      infile = '%s%s.fits' % (rawroot,i)
      raw_list.append('%s/%s' % (rawdir,infile))
      # Tag the output names with the observation date when one is given.
      if obsdate is not None:
         outframe = '%s_%s' % (obsdate,i)
      else:
         outframe = i
      lcfile = 'lincor%s.fits' % outframe
      ofile  = 'np%s.fits' % outframe
      print " %s" % infile
      f_in.write('%s\n' % infile)
      f_lc.write('%s\n' % lcfile)
      f_np.write('%s\n' % ofile)
      lc_list.append(lcfile)
      #np_list.append(ofile)
   f_in.close()
   f_lc.close()
   f_np.close()

   """
   Run nirlin to do the non-linearity correction
   """
   print ''
   print 'Running nirlin to correct for non-linearities'
   print '---------------------------------------------'
   print ''
   # One call per frame: raw_list[i] -> lc_list[i].
   for i in range(len(raw_list)):
      nirlin.nirlin(raw_list[i],outputfile=lc_list[i])
   #   if i==0:
   #      npstring = np_list[0]
   #   else:
   #      npstring += ',%s' % np_list[i]

   """ 
   Run pyraf task nprepare to convert format of raw files to the one that is
   expected for subsequent tasks.
   """
   print ""
   print "Running nprepare on linearized raw frames"
   print ""
   print "%s %s %s %s" %(rawdir,lincorname,npname,bpmfile)
   #print "%s" % npstring
   # IRAF '@file' syntax: the task reads its input names from the list file.
   tmplc = '@%s' % lincorname
   tmpnp = '@%s' % npname
   print tmplc, tmpnp
   iraf.nprepare(tmplc,outimages=tmpnp,bpm=bpmfile)

   """
   Run pyraf task nisky to make the initial sky frame
   """
   iraf.nisky(tmpnp,outimage=outname)
示例#13
0
import time
from IQTool.iq import detectSources as ds
#import iqUtil
import pyfits as pf
import pyraf
from pyraf import iraf
from pyraf.iraf import gemini
gemini()
gemini.gmos()

from astrodata.adutils.future import pyDisplay
from astrodata.adutils.future import starFilter

'''
!!!!!!!!!!!!!!!!!!!!!!! READ ME !!!!!!!!!!!!!!!!!!!!!!!!
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
@author: River Allen

What is this? - An experiment in creating a 'star filter' that could learn.

Why did you do this? - I had read an article on spam filtering using Bayesian Techniques and was inspired.
The article can be found here: http://www.paulgraham.com/spam.html

How does it work? - In its primitive unfinished form, you simply run it on a selected image and click inside
the boxes of things that you think should not be detected. Then you close ds9. 
If you run it again, these things will not be detected the second time around. The goal was to have it 
implement a balanced, probability-based Bayesian-type algorithm, but I never got around to actually getting 
a good one. Right now it is 'uneven' or too strong, and is not really learning except in the sense that it 
will not detect the EXACT same object again, rather than similar objects. For the most part, look in/modify 
the star filter in gemini_python/trunk/utils/future to improve this.
示例#14
0
def calib_1(sci_frames, rawroot, outroot, bpmfile, rawdir='.', obsdate=None, 
            do_nresid=False):
   """
   Does the non-linearity correction and then converts the linearized
    input files to the format expected by the Gemini pyraf tasks.  
   These steps are done through one Gemini and three pyraf tasks:
      nirlin    - to correct the raw files for nonlinearity
      nprepare  - to convert the linearized files to the appropriate format
      nresidual - [NOT IMPLEMENTED YET] to deal with persistence, if desired
      nisky     - to create the first-pass sky image
   """

   from pyraf import iraf
   iraf.gemini()
   iraf.niri()
   iraf.unlearn('nprepare')
   iraf.unlearn('nisky')
   import nirlin

   """
   Set up the file lists that will be used in the various tasks
   """

   rawname    = '%s_raw.list' % outroot
   lincorname = '%s_lincor.list' % outroot
   npname     = '%s_nprep.list' % outroot
   outname    = '%s_sky.fits' % outroot

   """
   Create the lists
   """

   print ""
   print "Generating list of science files"
   print "---------------------------------"
   raw_list = []
   lc_list = []
   #np_list = []
   f_in  = open(rawname,'w')
   f_lc  = open(lincorname,'w')
   f_np  = open(npname,'w')
   for i in sci_frames:
      infile = '%s%s.fits' % (rawroot,i)
      raw_list.append('%s/%s' % (rawdir,infile))
      if obsdate is not None:
         outframe = '%s_%s' % (obsdate,i)
      else:
         outframe = i
      lcfile = 'lincor%s.fits' % outframe
      ofile  = 'np%s.fits' % outframe
      print " %s" % infile
      f_in.write('%s\n' % infile)
      f_lc.write('%s\n' % lcfile)
      f_np.write('%s\n' % ofile)
      lc_list.append(lcfile)
      #np_list.append(ofile)
   f_in.close()
   f_lc.close()
   f_np.close()

   """
   Run nirlin to do the non-linearity correction
   """
   print ''
   print 'Running nirlin to correct for non-linearities'
   print '---------------------------------------------'
   print ''
   for i in range(len(raw_list)):
      nirlin.nirlin(raw_list[i],outputfile=lc_list[i])
   #   if i==0:
   #      npstring = np_list[0]
   #   else:
   #      npstring += ',%s' % np_list[i]

   """ 
   Run pyraf task nprepare to convert format of raw files to the one that is
   expected for subsequent tasks.
   """
   print ""
   print "Running nprepare on linearized raw frames"
   print ""
   print "%s %s %s %s" %(rawdir,lincorname,npname,bpmfile)
   #print "%s" % npstring
   tmplc = '@%s' % lincorname
   tmpnp = '@%s' % npname
   iraf.nprepare(tmplc,outimages=tmpnp,bpm=bpmfile)

   """
   Run pyraf task nisky to make the initial sky frame
   """
   iraf.nisky(tmpnp,outimage=outname)
示例#15
0
from astropy import log
from astropy.table import Table

# + on 19/05/2017
from astropy.visualization import ZScaleInterval
zscale = ZScaleInterval()
from astropy.visualization.mpl_normalize import ImageNormalize

import aplpy # + on 19/05/2017

import glog # + on 09/01/2018

from pylab import subplots_adjust
bbox_props = dict(boxstyle="square,pad=0.15", fc="w", alpha=0.5, ec="none")

# Load the Gemini IRAF package and its GNIRS subpackage quietly
# (_doprint=0 suppresses the package task listing).
iraf.gemini(_doprint=0)
iraf.gemini.gnirs(_doprint=0)

# Reset all task parameters to their package defaults before use.
log.info("Unlearning tasks")
iraf.gemini.unlearn()
iraf.gemini.gemtools.unlearn()
iraf.gemini.gnirs.unlearn()

# Select the larger image-display buffer.
iraf.set(stdimage="imt4096")
# iraf.gemini.nsheaders("gnirs")

# Directory containing this module -- presumably used to locate bundled
# calibration/data files; confirm usage elsewhere in the file.
co_dirname = os.path.dirname(__file__)

xorder = 3 # nsfitcoords fitting order along x

def get_database_model(path, source, get_lines=False, silent=False, verbose=True):
示例#16
0
def run():
    """
    Merge final cubes.

    Reads the merge configuration from ./config.cfg and executes merge
    steps mergeStart..mergeStop in order.  Odd steps (1, 3, 5) merge
    individual observation cubes of a given calibration level; even steps
    (2, 4, 6) merge the per-observation cubes produced by the previous
    step.
    """
    # Store current working directory for later use.
    path = os.getcwd()

    # Set up iraf
    iraf.gemini()
    iraf.nifs()
    iraf.gnirs()
    iraf.gemtools()

    # Unlearn the used tasks.
    iraf.unlearn(iraf.gemini,iraf.gemtools,iraf.gnirs,iraf.nifs)

    # Prepare the package for NIFS
    iraf.nsheaders("nifs",logfile="Nifty.log")
    iraf.set(stdimage='imt2048')
    user_clobber=iraf.envget("clobber")
    iraf.reset(clobber='yes')

    # Set up the logging file.
    log = os.getcwd()+'/Nifty.log'

    logging.info('\n#################################################')
    logging.info('#                                               #')
    logging.info('#       Start the NIFS Final Cube Merging       #')
    logging.info('#                                               #')
    logging.info('#################################################\n')

    # Load reduction parameters from ./config.cfg.
    with open('./config.cfg') as config_file:
        config = ConfigObj(config_file, unrepr=True)
        # Read general pipeline config.
        manualMode = config['manualMode']
        over = config['over']
        scienceDirectoryList = config['scienceDirectoryList']
        # Read baselineCalibrationReduction specfic config.
        mergeConfig = config['mergeConfig']
        start = mergeConfig['mergeStart']
        stop = mergeConfig['mergeStop']
        mergeType = mergeConfig['mergeType']
        use_pq_offsets = mergeConfig['use_pq_offsets']
        im3dtran = mergeConfig['im3dtran']

    def log_step_completed(message):
        # Emit the standard "step completed" banner around *message*.
        logging.info("\n##############################################################################")
        logging.info("")
        logging.info(message)
        logging.info("")
        logging.info("##############################################################################\n")

    # Map each step number to (action, completion message).  This replaces
    # six near-identical if-blocks; the call arguments and log strings are
    # unchanged.  Cube prefixes: "ctfbrsn" (uncorrected), "actfbrsn"
    # (telluric corrected), "factfbrsn" (telluric corrected and flux
    # calibrated).
    steps = {
        1: (lambda: mergeCubes(scienceDirectoryList, "uncorrected", mergeType, use_pq_offsets, im3dtran, over),
            "  STEP 1 - Merge Uncorrected Individual Observations - COMPLETED "),
        2: (lambda: finalMergeCubes(mergeType, over),
            "  STEP 2 - Merge Uncorrected Merged Observation Cubes - COMPLETED "),
        3: (lambda: mergeCubes(scienceDirectoryList, "telluricCorrected", mergeType, use_pq_offsets, im3dtran, over),
            "  STEP 3 - Merge Telluric Corrected Individual Observations - COMPLETED "),
        4: (lambda: finalMergeCubes(mergeType, over),
            "  STEP 4 - Merge Telluric Corrected Merged Observation Cubes - COMPLETED "),
        5: (lambda: mergeCubes(scienceDirectoryList, "telCorAndFluxCalibrated", mergeType, use_pq_offsets, im3dtran, over),
            "  STEP 5 - Merge Telluric Corrected and Flux Calibrated Cubes - COMPLETED "),
        6: (lambda: finalMergeCubes(mergeType, over),
            "  STEP 6 - Merge Telluric Corrected AND Flux Calibrated Cubes - COMPLETED "),
    }

    # Execute the configured range of steps in order; step numbers outside
    # 1-6 are silently skipped, as in the original if-chain.
    valindex = start
    while valindex <= stop:
        if valindex in steps:
            action, message = steps[valindex]
            action()
            log_step_completed(message)
        valindex += 1
示例#17
0
def start(obsDirList, use_pq_offsets, im3dtran, over=""):
    """MERGE

    This module contains all the functions needed to merge
    the final data cubes.

    NOTE: If you wish to shift the cubes manually in QFits View
    you can combine them in this script by making sure that you
    attach the prefix "shif" to each shifted image and save them
    in the observation directory (ie. obs108). This is necessary
    for very faint objects.

    INPUT:
        - Reference data cubes
        - A list of paths where final data cubes are located
        - Transformed integral field spectra

    OUTPUT:
        - Merged cubes for each observation (ie. DATE_obs##(#).fits)
        - One final merged cube from entire observation program
    """

    # Store the current working directory so we can find our way back later on.
    path = os.getcwd()

    iraf.gemini()
    iraf.nifs()
    iraf.gnirs()
    iraf.gemtools()

    # Unlearn the used tasks.
    iraf.unlearn(iraf.gemini, iraf.gemtools, iraf.gnirs, iraf.nifs)

    # Prepare the package for NIFS
    iraf.nsheaders("nifs", logfile="Nifty.log")
    iraf.set(stdimage='imt2048')
    user_clobber = iraf.envget("clobber")
    iraf.reset(clobber='yes')

    # Set the default logfile for iraf tasks.
    # TODO: Set the logfile for all iraf tasks! Right now it is not logging their output because of im3dtran...
    # It seems im3dtran doesn't have a "log" parameter.
    log = "Nifty.log"

    # Change to the directory in iraf.
    iraffunctions.chdir(path)

    # Create some lists here.
    listsOfCubes = [
    ]  # List of lists of cubes (one list for each science observation directory).
    mergedCubes = [
    ]  # List of Merged cubes (one merged cube for each science observation directory).
    obsidlist = []  # List of science observation id s.

    # Pixel scale in arcseconds/pixel.
    pixScale = 0.05

    # TODO(ncomeau[*AT*]uvic.ca): implement a way to read and save cubelists to textfiles. It would be nice for users to
    # be able to edit the list of cubes to merge by hand.
    # If no Merged directory exists that contains a textfile list of cubes:
    # Go to each science directory and copy cubes from there to a new directory called Merged.
    for obsDir in obsDirList:
        # Get date, obsid and obsPath by splitting each science directory name.
        # Eg: directory name is ""/Users/ncomeau/research/newer-nifty/hd165459/20160705/H/obs13", then:
        # temp1 == ('/Users/ncomeau/research/newer-nifty/hd165459/20160705/H', 'obs13')
        # temp2 == ('/Users/ncomeau/research/newer-nifty/hd165459/20160705', 'H')
        # temp3 == ('/Users/ncomeau/research/newer-nifty/hd165459', '20160705')
        # temp4 == ('/Users/ncomeau/research/newer-nifty', 'hd165459')

        # TODO: make this clearer.

        temp1 = os.path.split(obsDir)
        temp2 = os.path.split(temp1[0])
        temp3 = os.path.split(temp2[0])
        temp4 = os.path.split(temp3[0])
        objname = temp4[1]
        date = temp3[1]
        obsid = temp1[1]
        obsPath = temp3[0]
        os.chdir(obsDir)
        obsidlist.append(date + '_' + obsid)

        # Create a directory called Merged and copy all the data cubes to this directory.
        if not os.path.exists(obsPath + '/Merged/'):
            os.mkdir(obsPath + '/Merged/')
            logging.info('I am creating a directory called Merged')

        Merged = obsPath + '/Merged'

        if not os.path.exists(Merged + '/' + date + '_' + obsid):
            os.mkdir(Merged + '/' + date + '_' + obsid)
            logging.info(
                'I am creating a directory with date and abs ID inside Merged '
            )

        # If a list called shiftedcubes already exists then just merge those shifted cubes and continue.
        if glob.glob("./shift*.fits"):
            if over:
                if os.path.exists('./' + obsid + '_merged.fits'):
                    os.remove('./' + obsid + '_merged.fits')
                    iraf.gemcube(input="shif*.fits[SCI]",
                                 output=obsid + '_merged',
                                 logfile=log)
            elif not os.path.exists('./' + obsid + '_merged.fits'):
                iraf.gemcube(input="shif*.fits[SCI]",
                             output=obsid + '_merged',
                             logfile=log)
            else:
                logging.info(
                    "Output exists and -over- not set - shifted cubes are not being merged"
                )
            shutil.copy('./' + obsid + '_merged.fits', Merged)
            if obsDir == obsDirList[-1]:
                return
            else:
                continue

        # Create a list called cubes, which stores all the cubes from a particular night.
        # Store all the cubes lists in a list of lists called listsOfCubes.
        # TODO: syntax is fairly ugly; there may be a better way to do this.
        cubes = glob.glob(
            'catfbrsnN*.fits'
        )  # Cubes order at this point is arbitrary so we need to sort.
        cubes.sort(key=lambda x: x[-8:-5]
                   )  # Sort cubes in increasing order by last three digits.

        if cubes:
            listsOfCubes.append(cubes)
        else:
            cubes = glob.glob('cptfbrsnN*.fits')
            if cubes:
                cubes.sort(
                    key=lambda x: x[-8:-5]
                )  # Sort cubes in increasing order by last three digits.
                listsOfCubes.append(cubes)
            else:
                cubes = glob.glob('ctfbrsnN*.fits')
                if cubes:
                    cubes.sort(
                        key=lambda x: x[-8:-5]
                    )  # Sort cubes in increasing order by last three digits.
                    listsOfCubes.append(cubes)
                else:
                    logging.info(
                        "\n+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
                    )
                    logging.info(
                        "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
                    )
                    logging.info("")
                    logging.info("     ERROR in merge: no cubes found!")
                    logging.info("")
                    logging.info(
                        "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
                    )
                    logging.info(
                        "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\n"
                    )
                    raise SystemExit
        # Copy cubes to their respective data_obsid directory within Merged.
        for cube in cubes:
            shutil.copy(cube, Merged + '/' + date + '_' + obsid)

        os.chdir(Merged)

    n = 0
    for cubes in listsOfCubes:

        shiftlist = []
        os.chdir(Merged + '/' + obsidlist[n])
        iraffunctions.chdir(Merged + '/' + obsidlist[n])

        if use_pq_offsets:
            # Set the zero point p and q offsets to the p and q offsets of the first cube in each list of cubes.
            header = astropy.io.fits.open(cubes[0])
            p0 = header[0].header['POFFSET']
            q0 = header[0].header['QOFFSET']
            foff = open('offsets.txt', 'w')
            foff.write('%d %d %d\n' % (0, 0, 0))
            foff.close()

        suffix = cubes[0][-8:-5]
        if im3dtran:
            if os.path.exists('transcube' + suffix + '.fits'):
                if not over:
                    logging.info(
                        'Output already exists and -over- not set - skipping im3dtran'
                    )
                if over:
                    os.remove('transcube' + suffix + '.fits')
                    iraf.im3dtran(input=cubes[0] + '[SCI][*,*,-*]',
                                  new_x=1,
                                  new_y=3,
                                  new_z=2,
                                  output='transcube' + suffix)
            else:
                iraf.im3dtran(input=cubes[0] + '[SCI][*,*,-*]',
                              new_x=1,
                              new_y=3,
                              new_z=2,
                              output='transcube' + suffix)
        else:
            iraf.imcopy(cubes[0] + '[SCI][*,*,*]',
                        'NONtranscube' + suffix + '.fits')
        shiftlist.append('cube' + suffix + '.fits')
        iraffunctions.chdir(os.getcwd())

        for i in range(len(cubes)):
            # Skip the first cube!
            if i == 0:
                continue
            header2 = astropy.io.fits.open(cubes[i])
            suffix = cubes[i][-8:-5]

            # If user wants to merge using p and q offsets, grab those from .fits headers.
            if use_pq_offsets:
                # find the p and q offsets of the other cubes in the sequence.
                xoff = header2[0].header['POFFSET']
                yoff = header2[0].header['QOFFSET']
                # calculate the difference between the zero point offsets and the offsets of the other cubes and convert that to pixels
                xShift = round((xoff - p0) / pixScale)
                yShift = round((yoff - q0) / pixScale)
                # write all offsets to a text file (keep in mind that the x and y offsets use different pixel scales)
                foff = open('offsets.txt', 'a')
                if im3dtran:
                    # If we swap the y and lambda axis we must also write the offsets in x, lambda, y.
                    foff.write('%d %d %d\n' % (int(xShift), 0, int(yShift)))
                else:
                    # Write offsets in regular x, y, lambda.
                    foff.write('%d\t%d\t%d\n' % (xShift, yShift, 0.))
                foff.close()

            if im3dtran:
                prefix = 'transcube'
                if os.path.exists('transcube' + suffix + '.fits'):
                    if not over:
                        logging.info(
                            'Output already exists and -over- not set - skipping im3dtran'
                        )
                    if over:
                        os.remove('transcube' + suffix + '.fits')
                        iraf.im3dtran(input=cubes[i] + '[SCI][*,*,-*]',
                                      new_x=1,
                                      new_y=3,
                                      new_z=2,
                                      output='transcube' + suffix)
                else:
                    iraf.im3dtran(input=cubes[i] + '[SCI][*,*,-*]',
                                  new_x=1,
                                  new_y=3,
                                  new_z=2,
                                  output='transcube' + suffix)
            else:
                prefix = 'NONtranscube'
                iraf.imcopy(cubes[i] + '[SCI][*,*,*]',
                            prefix + suffix + '.fits')
            shiftlist.append('cube' + suffix + '.fits')

        if not use_pq_offsets:
            # Before we combine make sure a suitable offsets.txt file exists.
            a = raw_input(
                "\nPaused. Please provide a suitable offsets.txt file in ",
                Merged + '/' + obsidlist[n])
            while not os.path.exists('offsets.txt'):
                a = raw_input("No offsets.txt file found. Please try again.")
            logging.info('offsets.txt found successfully for', obsidlist[n])

        if os.path.exists('cube_merged.fits'):
            if over:
                os.remove('cube_merged.fits')
                iraf.imcombine(prefix + '*',
                               output='cube_merged.fits',
                               combine='median',
                               offsets='offsets.txt')
            else:
                logging.info(
                    'Output already exists and -over- not set - skipping imcombine'
                )
        else:
            iraf.imcombine(prefix + '*',
                           output='cube_merged.fits',
                           combine='median',
                           offsets='offsets.txt')
        if im3dtran:
            # Transpose the cube back to x, y, lambda.
            if os.path.exists('out.fits'):
                if over:
                    os.remove('out.fits')
                    iraf.im3dtran(input='cube_merged[*,-*,*]',
                                  new_x=1,
                                  new_y=3,
                                  new_z=2,
                                  output='out.fits')
                else:
                    logging.info(
                        'Output already exists and -over- not set - skipping final im3dtran'
                    )
            else:
                iraf.im3dtran(input='cube_merged[*,-*,*]',
                              new_x=1,
                              new_y=3,
                              new_z=2,
                              output='out.fits')
            iraf.fxcopy(input=cubes[0] + '[0], out.fits',
                        output=obsidlist[n] + '_merged.fits')
        else:
            iraf.fxcopy(input=cubes[0] + '[0], cube_merged.fits',
                        output=obsidlist[n] + '_merged.fits')
        mergedCubes.append(obsidlist[n] + '_merged.fits')

        n += 1
        os.chdir(Merged)

    # Copy the merged observation sequence data cubes to the Merged directory.
    for i in range(len(mergedCubes)):
        shutil.copy(Merged + '/' + obsidlist[i] + '/' + mergedCubes[i], './')

    # Merge all the individual merged observation sequence data cubes.
    # TODO: test. Still untested.
    """
示例#18
0
def main():
    """
    Entry point for the GIREDS (Gmos Ifu REDuction Suite) pipeline.

    Parses the command line and either checks calibration-file
    availability (--check) or runs the reduction steps selected in the
    configuration file:

        0 - associate raw files with their calibrations
        1 - reduce standard stars
        2 - reduce science exposures
        3 - merge the individual science cubes per object
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-c',
                        '--check',
                        help='Checks if the calibration '
                        'exposures required are available in the raw '
                        'directory.',
                        action='store_true')
    parser.add_argument('-v',
                        '--verbose',
                        help='Prints the dictionary of '
                        'file associations.',
                        action='store_true')
    parser.add_argument('config_file', help='Configuration file for GIREDS')
    parser.add_argument('--incremental',
                        help='Skip already reduced cubes',
                        action='store_true')

    args = parser.parse_args()

    # Calibration categories that must be present for a standard star and
    # for a science exposure, respectively.
    cal_categories_std = np.array(
        ['bias', 'flat', 'twilight', 'arc', 'twilight_flat', 'bpm'])

    cal_categories_sci = np.array([
        'bias', 'flat', 'twilight', 'arc', 'standard_star', 'twilight_flat',
        'bpm'
    ])

    def read_associations(fname):
        """Read and evaluate a file-association table written by step 0."""
        # NOTE(security): these files are generated by this pipeline and
        # parsed with eval(); never point this at untrusted input.
        # The with-block fixes the original's leaked file handles.
        with open(fname, 'r') as assoc_file:
            return eval(assoc_file.read())

    if args.check:

        # Dry run: associate files and report any missing calibrations
        # without reducing anything.
        pip = pipeline(args.config_file)
        pip.dry_run = True
        pip.associate_files()

        filecheck(pip.std, cal_categories_std)

        filecheck(pip.sci, cal_categories_sci)

        if args.verbose:
            print(json.dumps(pip.std, indent=4))
            print(json.dumps(pip.sci, indent=4))

    else:
        iraf.gemini()
        iraf.unlearn('gemini')

        iraf.gemtools()
        iraf.unlearn('gemtools')
        pip = pipeline(args.config_file)

        # The final cube prefix encodes which optional steps (L.A.Cosmic,
        # gscrrej) were applied during reduction.
        if pip.apply_lacos:
            if pip.cfg.getboolean('reduction', 'fl_gscrrej'):
                cube_prefix = 'dxcstexlprg'
            else:
                cube_prefix = 'dxcstelprg'
        else:
            if pip.cfg.getboolean('reduction', 'fl_gscrrej'):
                cube_prefix = 'dxcstexprg'
            else:
                cube_prefix = 'dxcsteprg'

        ver_stamp = (50 * '#' + '\n' + 'GIREDS version hash: ' + pip.version +
                     '\n' + 50 * '#' + '\n')

        logfile = pip.run_dir + '/logfile.log'
        print('##################################################\n'
              '# GIREDS (Gmos Ifu REDuction Suite)              #\n'
              '##################################################\n'
              'Starting reduction at: {:s}\n'.format(time.asctime()))

        # Step 0: raw-file / calibration association.
        if (pip.reduction_step == 0) or\
                ((pip.single_step is False) and (pip.reduction_step >= 0)):

            print('Starting reduction step 0\n'
                  'on directory {:s}\n'.format(pip.raw_dir))

            pip.associate_files()

        # Step 1: standard-star reduction.
        if (pip.reduction_step == 1) or\
                ((pip.single_step is False) and (pip.reduction_step >= 1)):

            os.chdir(pip.run_dir)

            iraf.printlog(ver_stamp, logfile=logfile, verbose='yes')

            iraf.printlog('Starting reduction step 1\n'
                          'on directory {:s}\n'.format(os.getcwd()),
                          logfile=logfile,
                          verbose='yes')

            pip.sci = read_associations('file_associations_sci.dat')
            pip.std = read_associations('file_associations_std.dat')

            for star in pip.std:

                cube_file = pip.run_dir + cube_prefix + star['image']
                if args.incremental and isfile(cube_file):
                    print('Skipping already reduced cube {:s}{:s}'.format(
                        cube_prefix, star['image']))
                    continue

                # Boolean mask: True where the calibration entry is filled.
                cal = np.array([
                    True if star[i] != '' else False
                    for i in cal_categories_std
                ])

                if not cal.all():
                    # Fixed: a '\n' was missing between 'necessary' and
                    # 'calibration' (the message rendered as
                    # "necessarycalibration"); the science branch below
                    # already had it.
                    iraf.printlog(
                        ('ERROR! Image {:s} does not have all the necessary\n'
                         'calibration files: ' +
                         len(cal[~cal]) * '{:s} ').format(
                             star['image'], *cal_categories_std[~cal]),
                        logfile=logfile,
                        verbose='yes')
                    iraf.printlog('Skipping image {:s}.'.format(star['image']),
                                  logfile=logfile,
                                  verbose='yes')
                    continue
                else:
                    try:
                        pip.stdstar(star)
                    except Exception as err:
                        iraf.printlog(err.__repr__(),
                                      logfile=logfile,
                                      verbose='yes')
                        iraf.printlog(
                            'ERROR! An error ocurred when trying to reduce '
                            'the standard star {:s}. Check logfile for more '
                            'information.'.format(star),
                            logfile=logfile,
                            verbose='yes')

        # Step 2: science reduction.
        if (pip.reduction_step == 2) or\
                ((pip.single_step is False) and (pip.reduction_step >= 2)):

            iraf.printlog(ver_stamp, logfile=logfile, verbose='yes')

            os.chdir(pip.run_dir)
            iraf.printlog(
                'Starting reduction step 2 on directory {:s}\n'.format(
                    os.getcwd()),
                logfile=logfile,
                verbose='yes')

            pip.sci = read_associations('file_associations_sci.dat')
            # pip.std = read_associations('file_associations_std.dat')

            for sci in pip.sci:

                cube_file = pip.run_dir + cube_prefix + sci['image']
                if args.incremental and isfile(cube_file):
                    print('Skipping already reduced cube {:s}{:s}'.format(
                        cube_prefix, sci['image']))
                    continue

                cal = np.array([
                    True if sci[i] != '' else False for i in cal_categories_sci
                ])

                if not cal.all():
                    iraf.printlog(
                        ('ERROR! Image {:s} does not have all the necessary\n'
                         'calibration files: ' +
                         len(cal[~cal]) * '{:s} ').format(
                             sci['image'], *cal_categories_sci[~cal]),
                        logfile=logfile,
                        verbose='yes')
                    iraf.printlog('Skipping image {:s}.'.format(sci['image']),
                                  logfile=logfile,
                                  verbose='yes')
                    continue
                else:
                    try:
                        pip.science(sci)
                    except Exception as err:
                        iraf.printlog(err.__repr__(),
                                      logfile=logfile,
                                      verbose='yes')
                        iraf.printlog(
                            'ERROR! An error ocurred when trying to reduce '
                            'the galaxy {:s}. Check logfile for more '
                            'information.'.format(sci),
                            logfile=logfile,
                            verbose='yes')

        # Step 3: merge the individual cubes of each object.
        if (pip.reduction_step == 3) or\
                ((pip.single_step is False) and (pip.reduction_step >= 3)):

            iraf.printlog(ver_stamp, logfile=logfile, verbose='yes')

            os.chdir(pip.run_dir)
            iraf.printlog(
                'Starting reduction step 3 on directory {:s}\n'.format(
                    os.getcwd()),
                logfile=logfile,
                verbose='yes')

            pip.sci = read_associations('file_associations_sci.dat')
            # pip.std = read_associations('file_associations_std.dat')

            # One entry per science exposure: lower-case object name with
            # spaces stripped; sciname holds the unique object names.
            listname = [(sci['object'].lower()).replace(' ', '')
                        for sci in pip.sci]
            sciname = list(set(listname))

            for name in sciname:

                suffix = '_HYPERCUBE.fits'
                cube_file = pip.run_dir + '/' + name + suffix

                if os.path.isfile(cube_file):
                    skipwarn(cube_file)
                    continue

                # NOTE(review): unreachable — the unconditional isfile
                # check above already skips existing hypercubes; kept for
                # parity with the original control flow.
                if args.incremental and isfile(cube_file):
                    print('Skipping already reduced cube {:s}{:s}'.format(
                        name, suffix))
                    continue

                # All exposures belonging to this object.
                sciobj = [
                    sci for m, sci in enumerate(pip.sci) if listname[m] == name
                ]

                # Prefix may change
                cubes = np.array([
                    True if os.path.isfile(cube_prefix +
                                           sci['image']) else False
                    for sci in sciobj
                ])

                if not cubes.all():
                    iraf.printlog((
                        'ERROR! Object {:s} does not have all the necessary\ncube files.'
                    ).format(name),
                                  logfile=logfile,
                                  verbose='yes')
                    iraf.printlog('Skipping {:s}.'.format(name),
                                  logfile=logfile,
                                  verbose='yes')
                    continue
                else:
                    try:
                        pip.merge(sciobj, name, cube_prefix)
                    except Exception as err:
                        iraf.printlog(err.__repr__(),
                                      logfile=logfile,
                                      verbose='yes')
                        iraf.printlog(
                            'ERROR! An error ocurred when trying to merge '
                            'the galaxy {:s}. Check logfile for more '
                            'information.'.format(name),
                            logfile=logfile,
                            verbose='yes')
示例#19
0
# Standard library.
import sys
import glob
import shutil
import getopt
import os
import time
import logging
# Third party: pexpect drives interactive subprocesses; pyraf wraps IRAF.
import pexpect as p
from pyraf import iraf
# Load the Gemini IRAF packages required by the NIFS reduction tasks.
iraf.gemini()
iraf.nifs()
iraf.gnirs()
iraf.gemtools()
from pyraf import iraffunctions
from astropy.io import fits
# Project-local helpers: list handling and telluric-correction utilities.
from nifsDefs import datefmt, listit, writeList, checkLists, writeCenters, makeSkyList, MEFarith
from nifsTelluric import extrap1d, readCube, readSpec, telCor

#--------------------------------------------------------------------#
#                                                                    #
#     SCIENCE                                                        #
#                                                                    #
#     This module contains all the functions needed to reduce        #
#     the NIFS science images.                                       #
#                                                                    #
#    COMMAND LINE OPTIONS                                            #
#    If you wish to skip this script for science data                #
#    enter -n in the command line                                    #
#    If you wish to skip this script for telluric data               #
#    enter -k in the command line                                    #
#    Specify a start value with -b (default is 1)                    #
示例#20
0
def reduce_science(rawdir, rundir, flat, arc, twilight, twilight_flat, sciimg,
                   starimg, bias, overscan, vardq, observatory, lacos,
                   apply_lacos, lacos_xorder, lacos_yorder, lacos_sigclip,
                   lacos_objlim, bpm, instrument, slits, fl_gscrrej,
                   wltrim_frac, grow_gap, cube_bit_mask):
    """
    Reduction pipeline for a GMOS IFU science image.

    Runs the full chain: gfreduce (bias/overscan/trim), gemfix,
    optional L.A.Cosmic, fiber extraction, wavelength transformation,
    sky subtraction, flux calibration, PCA-based spurious-data removal
    and data cube construction.  Every step prepends a one-letter
    prefix to the file name; if a step's output already exists the
    step is skipped, so an interrupted run can be resumed.

    Parameters
    ----------
    rawdir: string
        Directory containing raw images.
    rundir: string
        Directory where processed files are saved.
    flat: string
        Names of the files containing flat field images.
    arc: string
        Arc images.
    twilight: string
        Twilight flat images.
    twilight_flat: string
        Flat field for twilight image.
    sciimg: string
        Name of the file containing the science image to be reduced.
    starimg: string
        Name of the reduced standard star image; used as the
        sensitivity function (sfuncti) in gscalibrate.
    bias: string
        Bias images.
    overscan: string
        'yes'/'no' flag forwarded to gfreduce as fl_over.
    vardq: string
        'yes'/'no' flag controlling VAR/DQ extension propagation.
    observatory: string
        Observatory keyword forwarded to gscalibrate.
    lacos: string
        Path to the lacos_spec IRAF task script.
    apply_lacos: bool
        If True, run gemcrspec (L.A.Cosmic) cosmic ray rejection.
    lacos_xorder, lacos_yorder:
        Fit orders forwarded to gemcrspec.
    lacos_sigclip, lacos_objlim:
        Detection thresholds forwarded to gemcrspec.
    bpm: string
        Bad pixel mask file name, resolved relative to rawdir.
    instrument: string
        Instrument identifier, forwarded to cal_reduction.
    slits: string
        Slit configuration, forwarded to cal_reduction.
    fl_gscrrej: string
        'yes'/'no' flag for gscrrej inside gfreduce; also decides
        whether extracted frames carry an 'ex' or 'e' prefix.
    wltrim_frac: float
        Fraction of the wavelength range trimmed by wl_lims when
        computing the limits for gftransform.
    grow_gap: number
        Number of pixels by which to grow the bad pixel mask around
        the chip gaps.
    cube_bit_mask:
        Not referenced in this function's body — presumably consumed
        elsewhere in the pipeline; confirm before removing.
    """

    iraf.set(stdimage='imtgmos')

    # Load the Gemini IRAF packages and reset their parameters.
    iraf.gemini()
    iraf.unlearn('gemini')

    iraf.gmos()
    iraf.unlearn('gmos')

    iraf.gemtools()
    iraf.unlearn('gemtools')

    # NOTE(review): 'gemini' and 'gmos' were already unlearned above;
    # these two calls are redundant but harmless.
    iraf.unlearn('gemini')
    iraf.unlearn('gmos')

    # Register the external L.A.Cosmic spectroscopic task under the
    # name lacos_spec so gemcrspec can find it.
    iraf.task(lacos_spec=lacos)

    # set directories
    iraf.set(caldir=rawdir)  #
    iraf.set(rawdir=rawdir)  # raw files
    iraf.set(procdir=rundir)  # processed files

    iraf.gmos.logfile = 'logfile.log'
    iraf.gfextract.verbose = 'no'

    iraf.cd('procdir')

    # Work with extension-less image root names throughout.
    flat = flat.replace('.fits', '')
    twilight = twilight.replace('.fits', '')
    twilight_flat = twilight_flat.replace('.fits', '')
    arc = arc.replace('.fits', '')
    starimg = starimg.replace('.fits', '')
    sciimg = sciimg.replace('.fits', '')
    mdffile = 'mdf' + flat + '.fits'

    # Default parameters shared by every gfreduce invocation below.
    iraf.gfreduce.bias = 'caldir$' + bias
    iraf.gfreduce.fl_fulldq = 'yes'
    iraf.gfreduce.fl_fixgaps = 'yes'
    iraf.gfreduce.grow = grow_gap
    iraf.gireduce.bpm = 'rawdir$' + bpm
    iraf.gfextract.verbose = 'no'

    # Reduce the calibration frames (flat, arc, twilight) first; the
    # science reduction below relies on their products ('eprg' files).
    cal_reduction(rawdir=rawdir,
                  rundir=rundir,
                  flat=flat,
                  arc=arc,
                  twilight=twilight,
                  bias=bias,
                  bpm=bpm,
                  overscan=overscan,
                  vardq=vardq,
                  instrument=instrument,
                  slits=slits,
                  twilight_flat=twilight_flat,
                  grow_gap=grow_gap)
    #
    #   Actually reduce science
    #
    # Step 1: bias/overscan subtraction and trimming -> 'rg' prefix.
    image_name = 'rg' + sciimg + '.fits'
    if os.path.isfile(image_name):
        pipe.skipwarn(image_name)
    else:
        iraf.gfreduce(sciimg,
                      slits='header',
                      rawpath='rawdir$',
                      fl_inter='no',
                      fl_addmdf='yes',
                      key_mdf='MDF',
                      mdffile=mdffile,
                      weights='no',
                      fl_over=overscan,
                      fl_trim='yes',
                      fl_bias='yes',
                      trace='no',
                      recenter='no',
                      fl_fulldq='yes',
                      fl_flux='no',
                      fl_gscrrej='no',
                      fl_extract='no',
                      fl_gsappwave='no',
                      fl_wavtran='no',
                      fl_novl='yes',
                      fl_skysub='no',
                      fl_vardq=vardq,
                      mdfdir='procdir$')
    prefix = 'rg'

    # Step 2: repair bad pixels flagged in the DQ plane -> 'p' prefix.
    image_name = 'p' + prefix + sciimg + '.fits'
    if os.path.isfile(image_name):
        pipe.skipwarn(image_name)
    else:
        iraf.gemfix(prefix + sciimg,
                    out='p' + prefix + sciimg,
                    method='fit1d',
                    bitmask=65535,
                    axis=1)
    prefix = 'p' + prefix

    # Step 3 (optional): L.A.Cosmic cosmic ray removal -> 'l' prefix.
    if apply_lacos:
        image_name = 'l' + prefix + sciimg + '.fits'
        if os.path.isfile(image_name):
            pipe.skipwarn(image_name)
        else:
            iraf.gemcrspec(prefix + sciimg,
                           out='l' + prefix + sciimg,
                           sigfrac=0.5,
                           niter=4,
                           fl_vardq=vardq,
                           xorder=lacos_xorder,
                           yorder=lacos_yorder,
                           sigclip=lacos_sigclip,
                           objlim=lacos_objlim)
        prefix = 'l' + prefix

    # Step 4: fiber extraction -> 'ex' prefix when gscrrej is applied,
    # 'e' otherwise (gfreduce chooses the prefix accordingly).
    if fl_gscrrej:
        image_name = 'ex' + prefix + sciimg + '.fits'
    else:
        image_name = 'e' + prefix + sciimg + '.fits'

    if os.path.isfile(image_name):
        pipe.skipwarn(image_name)
    else:
        iraf.gfreduce(prefix + sciimg,
                      slits='header',
                      rawpath='./',
                      fl_inter='no',
                      fl_addmdf='no',
                      key_mdf='MDF',
                      mdffile=mdffile,
                      fl_over='no',
                      fl_trim='no',
                      fl_bias='no',
                      trace='no',
                      recenter='no',
                      fl_flux='no',
                      fl_gscrrej=fl_gscrrej,
                      fl_extract='yes',
                      fl_gsappwave='yes',
                      fl_wavtran='no',
                      fl_novl='no',
                      fl_skysub='no',
                      grow=grow_gap,
                      reference='eprg' + flat,
                      weights='no',
                      wavtraname='eprg' + arc,
                      response='eprg' + flat + '_response.fits',
                      fl_vardq=vardq,
                      fl_fulldq='yes',
                      fl_fixgaps='yes')

    if fl_gscrrej:
        prefix = 'ex' + prefix
    else:
        prefix = 'e' + prefix

    # if wl2 > 7550.0:
    #     wl2 = 7550.0

    #
    #   Apply wavelength transformation
    #

    # Trim the wavelength range symmetrically by wltrim_frac before
    # rectifying, to avoid poorly sampled edges.
    wl1, wl2 = wl_lims(prefix + sciimg + '.fits', wltrim_frac)

    image_name = 't' + prefix + sciimg + '.fits'
    if os.path.isfile(image_name):
        pipe.skipwarn(image_name)
    else:
        iraf.gftransform(
            prefix + sciimg,
            wavtraname='eprg' + arc,
            fl_vardq=vardq,
            w1=wl1,
            w2=wl2,
        )

    prefix = 't' + prefix
    #
    #   Sky subtraction
    #
    image_name = 's' + prefix + sciimg + '.fits'
    if os.path.isfile(image_name):
        pipe.skipwarn(image_name)
    else:
        iraf.gfskysub(
            prefix + sciimg,
            expr='default',
            combine='median',
            reject='avsigclip',
            scale='none',
            zero='none',
            weight='none',
            sepslits='yes',
            fl_inter='no',
            lsigma=1,
            hsigma=1,
        )

    prefix = 's' + prefix
    #
    #   Apply flux calibration to galaxy
    #
    image_name = 'c' + prefix + sciimg + '.fits'
    if os.path.isfile(image_name):
        pipe.skipwarn(image_name)
    else:
        iraf.gscalibrate(prefix + sciimg,
                         sfuncti=starimg,
                         extinct='onedstds$ctioextinct.dat',
                         observatory=observatory,
                         fluxsca=1,
                         fl_vardq=vardq)
    prefix = 'c' + prefix
    #
    # Remove spurious data with PCA
    #
    image_name = 'x' + prefix + sciimg + '.fits'
    print(os.getcwd())
    print(image_name)
    if os.path.isfile(image_name):
        pipe.skipwarn(image_name)
    else:
        t = pca.Tomography(prefix + sciimg + '.fits')
        t.decompose()
        t.remove_cosmic_rays(sigma_threshold=10.0)
        t.write(image_name)
    prefix = 'x' + prefix
    #
    #   Create data cubes
    #
    image_name = 'd' + prefix + sciimg + '.fits'
    if os.path.isfile(image_name):
        pipe.skipwarn(image_name)
    else:
        data_cube = CubeBuilder(prefix + sciimg + '.fits')
        data_cube.build_cube()
        data_cube.fit_refraction_function()
        data_cube.fix_atmospheric_refraction()
        data_cube.write(image_name)
示例#21
0
def main():
    """
    Driver for (re-)extracting the fibers of one hard-coded GMOS flat.

    Cleans the previous extraction products for flat S20110927S0062,
    fixes the MDF with AutoApertures, and runs gfextract.

    NOTE(review): the ``sys.exit()`` right after the first gfextract
    call makes the aperture-iteration loop below it unreachable — it
    looks like a debugging leftover; confirm before removing.
    """
    iraf.set(stdimage='imtgmos')

    # Load the Gemini/GMOS IRAF packages.
    iraf.gemini()
    iraf.gmos()

    # set directories
    iraf.set(rawdir='/dados/gmos/raw')  # raw files
    iraf.set(
        procdir='/dados/gmos/reduction/products/ngc7213/')  # processed files

    iraf.gmos.logfile = 'logfile.log'
    iraf.gfextract.verbose = 'no'

    iraf.cd('procdir')

    # Reset the tasks used below to their default parameters.
    for task in ['gemini', 'gmos', 'gfextract']:
        iraf.unlearn(task)

    # Hard-coded flat field frame for this data set.
    flat = 'S20110927S0062'

    # Remove stale aperture-database files from a previous extraction.
    for name in glob.glob('database/apeprg' + flat + '*'):
        if os.path.isfile(name):
            print('Removing file {:s}'.format(name))
            os.remove(name)

    if os.path.isfile('eprg' + flat + '.fits'):
        os.remove('eprg' + flat + '.fits')

    grow_gap = 1
    vardq = 'yes'

    # Detect dead fibers and patch the MDF accordingly.
    ap = auto_apertures.AutoApertures('prg' + flat + '.fits')
    ap.find_dead_beams()
    ap.fix_mdf()

    iraf.delete('eprg' + flat + '.fits')
    extract_args = {
        'inimage': 'prg' + flat,
        'exslits': '*',
        'trace': 'yes',
        'recenter': 'yes',
        'order': 9,
        't_nsum': 50,
        'function': 'chebyshev',
        'fl_novl': 'no',
        'fl_fulldq': vardq,
        'fl_gnsskysub': 'no',
        'fl_fixnc': 'no',
        'fl_fixgaps': 'yes',
        'fl_vardq': 'yes',
        'grow': grow_gap,
        'fl_inter': 'no',
        'verbose': 'no'
    }

    iraf.gfextract(**extract_args)
    # NOTE(review): everything below is unreachable because of this
    # sys.exit() — presumably a debugging stop; verify intent.
    sys.exit()

    # Iterate interactively until the extracted apertures match the
    # MDF, giving up after 5 attempts.
    time_out = 0
    while (ap.check_iraf('database/apeprg' + flat) != 0) and (time_out < 5):
        ap.fix_mdf()
        print('Aperture iteration #{:d}.'.format(time_out))
        iraf.delete('eprg' + flat + '.fits')
        iraf.delete('database/apeprg' + flat + '*')

        extract_args['fl_inter'] = 'yes'
        iraf.gfextract(**extract_args)

        time_out += 1
示例#22
0
def make_flat(on_frames, off_frames, rawroot, band, rawdir='.'):
   """
   Makes an imaging flat-field file using the niflat task in pyraf

   Parameters (Python 2 code; no annotations added):
     on_frames  - frame ids with the flat lamps on
     off_frames - frame ids with the flat lamps off
     rawroot    - filename prefix shared by all raw frames
     band       - filter band name, used to label the list files
                  and the output Flat_<band>.fits
     rawdir     - directory holding the raw frames (default '.')

   Side effects: writes four list files, creates Flat_<band>.fits,
   and deletes the intermediate np*.fits frames when done.
   """

   from pyraf import iraf
   iraf.gemini()
   iraf.niri()
   iraf.unlearn('nprepare')
   iraf.unlearn('niflat')

   """
   Set up the file lists that will be used in the various tasks
   """

   # List-file names are keyed by the band so different bands don't
   # clobber each other's lists.
   rawname = 'flats_%s_all_raw.list' % band
   npname  = 'flats_%s_all_nprepare.list' % band
   onname  = 'flats_%s_on.list' % band
   offname = 'flats_%s_off.list' % band
   outflat = 'Flat_%s.fits' % band

   """
   Create the lists
   """

   print ""
   print "Generating list of lamps-on files"
   print "---------------------------------"
   all_list = []
   f_in  = open(rawname,'w')
   f_np  = open(npname,'w')
   f_on  = open(onname,'w')
   f_off = open(offname,'w')
   # Lamps-on frames go into the raw, nprepare and on lists.
   for i in on_frames:
      infile = '%s%s.fits' % (rawroot,i)
      ofile  = 'np%s.fits' % i
      print " %s" % infile
      f_in.write('%s\n' % infile)
      f_np.write('%s\n' % ofile)
      f_on.write('%s\n' % ofile)
      all_list.append(ofile)
   print ""
   print "Generating list of lamps-off files"
   print "----------------------------------"
   # Lamps-off frames go into the raw, nprepare and off lists.
   for i in off_frames:
      infile = '%s%s.fits' % (rawroot,i)
      ofile  = 'np%s.fits' % i
      print " %s" % infile
      f_in.write('%s\n' % infile)
      f_np.write('%s\n' % ofile)
      f_off.write('%s\n' % ofile)
      all_list.append(ofile)
   f_in.close()
   f_np.close()
   f_on.close()
   f_off.close()

   """ 
   Run pyraf task nprepare to convert format of raw files to the one that is
   expected for subsequent tasks.
   """
   print ""
   print "Running nprepare on raw flat frames"
   print ""
   iraf.nprepare('@%s' %rawname,rawpath=rawdir,outimages='@%s'%npname)

   """
   Run pyraf task niflat to actually make the flat-field file
   """
   iraf.niflat('@%s'%onname,flatfile=outflat,lampsoff='@%s'%offname)


   """ Clean up """
   # The np*.fits intermediates are no longer needed once the flat exists.
   for i in all_list:
      os.remove(i)

   """ Give some information about the output file """
   print ""
   print "New flatfield file summary"
   print "------------------------------------------------------------------"
   flathdu = pf.open(outflat)
   flathdu.info()
   flathdu.close()
示例#23
0
def merge_cubes(rawdir, rundir, name, observatory, imgcube, xoff, yoff, crval3,
                cdelt3, cdelt1):
    """
    Merge individual object data cubes into a single hypercube.

    Aligns the cubes with imcombine using spatial/spectral offsets
    computed from the telescope offsets and WCS keywords, propagates
    the variance, builds an error cube from the exposure mask, and
    writes <name>_HYPERCUBE.fits with SCI/ERR/NCUBE/SIG_IN extensions.

    Parameters
    ----------
    rawdir: string
        Directory containing raw images.
    rundir: string
        Directory where processed files are saved.
    name: string
        Name of the object.
    observatory: string
        Gemini-South/Gemini-North.
    imgcube: list of strings
        Cube file for each object cube.
    xoff: list of floats
        x-offset for each object cube.
    yoff: list of floats
        y-offset for each object cube.
    crval3: list of floats
        crval3 for each object cube.
    cdelt3: list of floats
        cdelt3 for each object cube.
    cdelt1: list of floats
        cdelt1 for each object cube.
    """

    rundir = rundir + '/'

    iraf.set(stdimage='imtgmos')

    # Load and reset the Gemini IRAF packages.
    iraf.gemini()
    iraf.unlearn('gemini')

    iraf.gmos()
    iraf.unlearn('gmos')

    iraf.gemtools()
    iraf.unlearn('gemtools')

    iraf.gmos.logfile = 'logfile.log'
    iraf.gemtools.gloginit.logfile = 'logfile.log'

    # set directories
    iraf.set(caldir=rawdir)  #
    iraf.set(rawdir=rawdir)  # raw files
    iraf.set(procdir=rundir)  # processed files

    iraf.cd('procdir')

    #
    #   Creation of file/offset files
    #
    nCubes = len(imgcube)

    in_filesSCI = 'files_' + name + '_SCI'
    in_filesVAR = 'files_' + name + '_VAR'
    in_offset = 'offsets_' + name

    # imcombine input lists: extension [1] is SCI, [2] is VAR.
    with open(in_filesSCI, 'w') as f:
        for img in imgcube:
            f.write(rundir + img + '[1]' + '\n')
    with open(in_filesVAR, 'w') as f:
        for img in imgcube:
            f.write(rundir + img + '[2]' + '\n')

    # Invert (x,y)offsets if in gemini-north
    sign = -1 if (observatory.lower() == 'gemini-north') else 1
    # Offsets relative to the first cube: spatial ones in pixels
    # (divided by the plate scale cdelt1), spectral in channels.
    with open(in_offset, 'w') as f:
        for k in range(nCubes):
            f.write("{:.5f} {:.5f} {:.5f}\n".format(
                sign * (xoff[k] - xoff[0]) / cdelt1[k],
                sign * (yoff[k] - yoff[0]) / cdelt1[k],
                (crval3[k] - crval3[0]) / cdelt3[k]))

    #
    #   Definition of in/output files. And header modification.
    #
    in_sci = [img + '[1]' for img in imgcube]
    in_var = [img + '[2]' for img in imgcube]
    in_dq = [img + '[3]' for img in imgcube]
    out_sci = name + '_SCI.fits'
    out_var = name + '_VAR.fits'
    out_sigIN = name + '_SIGIN.fits'
    out_exp = name + '_EXP'

    # Convert DQ extension to 'pl' and add the its filename to 'bpm' key
    # --- Change to other key. (Other rotines use this key) - Improve
    # --- Change also the key for bpm used by 'fixpix' ------ Improve
    out_dqPL = [img[:-5] + '_DQ.pl' for img in imgcube]

    for k in range(nCubes):
        print(in_sci[k], in_var[k], in_dq[k], out_dqPL[k])
        iraf.imcopy(in_dq[k], out_dqPL[k])
        # imcombine reads the BPM keyword to mask bad pixels.
        iraf.hedit(in_sci[k], 'BPM', out_dqPL[k], add='yes', verify='no')
        iraf.hedit(in_var[k], 'BPM', out_dqPL[k], add='yes', verify='no')

    #
    #   Merge sci/var cubes
    #
    iraf.imcombine("@" + in_filesSCI,
                   out_sci,
                   offsets=in_offset,
                   combine='average',
                   reject='avsigclip',
                   masktype='goodvalue',
                   maskvalue=0,
                   expmasks=out_exp,
                   sigmas=out_sigIN)

    # Variances add, so the VAR cubes are summed without rejection.
    iraf.imcombine("@" + in_filesVAR,
                   out_var,
                   offsets=in_offset,
                   combine='sum',
                   reject='none',
                   masktype='goodvalue',
                   maskvalue=0)

    #
    #   Create correct error cube
    #
    # BUGFIX: the original destination was
    # out_exp.replace('.pl', '.fits'), a no-op because out_exp
    # ('<name>_EXP') contains no '.pl' — the output only got a .fits
    # extension through IRAF's implicit default image type.  Make the
    # destination explicit; it is read back just below.
    iraf.imcopy(out_exp + '.pl', out_exp + '.fits')

    # Read cubes
    sci_cube = pf.getdata(out_sci)
    var_cube = pf.getdata(out_var)
    sigIN_cube = pf.getdata(out_sigIN)
    exp_cube = pf.getdata(out_exp + '.fits')

    # --- Identify problem with negative values ---- Improve
    # RuntimeWarning: invalid value encountered in divide
    # Error per voxel: sigma = sqrt(Var / N^2); pixels with zero
    # exposures are masked to avoid division by zero.
    exp_MASK = np.ma.array(exp_cube, mask=(exp_cube == 0))
    err_cube = np.sqrt(abs(var_cube / exp_MASK**2).data)

    #
    #   Create hypercube
    #
    # ---- Maybe don't need header for each extension -- Improve
    pry = pf.PrimaryHDU(header=pf.getheader(out_sci))
    hdu1 = pf.ImageHDU(sci_cube, header=pf.getheader(out_sci), name='SCI')
    hdu2 = pf.ImageHDU(err_cube, header=pf.getheader(out_var), name='ERR')
    hdu4 = pf.ImageHDU(sigIN_cube,
                       header=pf.getheader(out_sigIN),
                       name='SIG_IN')
    hdu3 = pf.ImageHDU(exp_cube,
                       header=pf.getheader(out_exp + '.fits'),
                       name='NCUBE')

    hdu = pf.HDUList([pry, hdu1, hdu2, hdu3, hdu4])
    hdu.writeto(name + '_HYPERCUBE.fits')
示例#24
0
@author: cmccully
'''
import os, shutil
from glob import glob
import pyfits
import numpy as np
from astroscrappy import detect_cosmics
from pyraf import iraf
from scipy import interpolate, ndimage, signal, optimize
import pf_model as pfm
import statsmodels as sm
from astropy.modeling import models, fitting
import astropy

# Module-level IRAF setup: point IRAF at the current working directory
# and load the Gemini/GMOS and onedspec packages.
iraf.cd(os.getcwd())
iraf.gemini()
iraf.gmos()
iraf.onedspec()

# Blue wavelength cutoff in Angstroms — TODO confirm units/usage at
# the (not visible here) call sites.
bluecut = 3450

iraf.gmos.logfile = "log.txt"
iraf.gmos.mode = 'h'
# Allow IRAF tasks to overwrite existing output files.
iraf.set(clobber='yes')

iraf.set(stdimage='imtgmos')

# Module-wide flags, presumably toggled elsewhere before running the
# reduction: overscan subtraction and Gemini-South mode — verify.
dooverscan = False
is_GS = False

def normalize_fitting_coordinate(x):
示例#25
0
import getopt

import os, glob, shutil, logging

import pexpect as p

import time

from pyraf import iraf

iraf.gemini()

iraf.nifs()

iraf.gnirs()

iraf.gemtools()

from pyraf import iraffunctions

from astropy.io import fits

from nifsDefs import datefmt, writeList, listit


#--------------------------------------------------------------------#

#                                                                    #

#     MERGE                                                          #
示例#26
0
def start(kind, telluricDirectoryList="", scienceDirectoryList=""):
    """

    start(kind): Do a full reduction of either Science or Telluric data.

    nifsReduce- for the telluric and science data reduction.

    Reduces NIFS telluric and science frames and attempts a flux calibration.

    Parameters are loaded from runtimeData/config.cfg. This script will
    automatically detect if it is being run on telluric data or science data.

    There are 6 steps.

    INPUT:
    + Raw files
        - Science frames
        - Sky frames
    + Calibration files
        - MDF shift file
        - Bad Pixel Mask (BPM)
        - Flat field frame
        - Reduced arc frame
        - Reduced ronchi mask frame
        - arc and ronchi database/ files

    OUTPUT:
        - If telluric reduction an efficiency spectrum used to telluric correct and absolute flux
          calibrate science frames
        - If science reduction a reduced science data cube.

    Args:
        kind (string): either 'Telluric' or 'Science'.
        telluricDirectoryList (string): Used by low memory pipeline.
        scienceDirectoryList (string): Used by low memory pipeline.

    """

    # TODO(nat): Right now the pipeline will crash if you decide to skip, say, doing a bad
    # pixel correction. This is because each step adds a prefix to the frame name, and most following
    # steps depend on that prefix being there.
    # One way to fix this is if a step is to be skipped, iraf.copy() is called instead to copy the frame and
    # add the needed prefix. Messy but it might work for now.

    ###########################################################################
    ##                                                                       ##
    ##                  BEGIN - GENERAL REDUCTION SETUP                      ##
    ##                                                                       ##
    ###########################################################################

    # Store current working directory for later use.
    path = os.getcwd()

    # Set up the logging file.
    log = os.getcwd() + '/Nifty.log'

    logging.info('\n#################################################')
    logging.info('#                                               #')
    logging.info('# Start the NIFS Science and Telluric Reduction #')
    logging.info('#                                               #')
    logging.info('#################################################\n')

    # Set up/prepare IRAF.
    iraf.gemini()
    iraf.gemtools()
    iraf.gnirs()
    iraf.nifs()

    # Reset to default parameters the used IRAF tasks.
    iraf.unlearn(iraf.gemini, iraf.gemtools, iraf.gnirs, iraf.nifs,
                 iraf.imcopy)

    # From http://bishop.astro.pomona.edu/Penprase/webdocuments/iraf/beg/beg-image.html:
    # Before doing anything involving image display the environment variable
    # stdimage must be set to the correct frame buffer size for the display
    # servers (as described in the dev$graphcap file under the section "STDIMAGE
    # devices") or to the correct image display device. The task GDEVICES is
    # helpful for determining this information for the display servers.
    iraf.set(stdimage='imt2048')

    # Prepare the IRAF package for NIFS.
    # NSHEADERS lists the header parameters used by the various tasks in the
    # NIFS package (excluding headers values which have values fixed by IRAF or
    # FITS conventions).
    iraf.nsheaders("nifs", logfile=log)

    # Set clobber to 'yes' for the script. This still does not make the gemini
    # tasks overwrite files, so:
    # YOU WILL LIKELY HAVE TO REMOVE FILES IF YOU RE_RUN THE SCRIPT.
    user_clobber = iraf.envget("clobber")
    iraf.reset(clobber='yes')

    # This helps make sure all variables are initialized to prevent bugs.
    scienceSkySubtraction = None
    scienceOneDExtraction = None
    extractionXC = None
    extractionYC = None
    extractionRadius = None
    telluricSkySubtraction = None

    # Load reduction parameters from runtimeData/config.cfg.
    with open('./config.cfg') as config_file:
        config = ConfigObj(config_file, unrepr=True)
        # Read general pipeline config.
        over = config['over']
        manualMode = config['manualMode']
        calDirList = config['calibrationDirectoryList']
        scienceOneDExtraction = config['scienceOneDExtraction']
        extractionXC = config['extractionXC']
        extractionYC = config['extractionYC']
        extractionRadius = config['extractionRadius']

        if kind == 'Telluric':
            # Telluric reduction specific config.
            telluricReductionConfig = config['telluricReductionConfig']
            if telluricDirectoryList:
                observationDirectoryList = telluricDirectoryList
            elif not telluricDirectoryList:
                observationDirectoryList = config['telluricDirectoryList']
            start = telluricReductionConfig['telStart']
            stop = telluricReductionConfig['telStop']
            telluricSkySubtraction = telluricReductionConfig[
                'telluricSkySubtraction']

        if kind == 'Science':
            # Science reduction specific config.
            scienceReductionConfig = config['scienceReductionConfig']
            if scienceDirectoryList:
                observationDirectoryList = scienceDirectoryList
            elif not scienceDirectoryList:
                observationDirectoryList = config['scienceDirectoryList']
            start = scienceReductionConfig['sciStart']
            stop = scienceReductionConfig['sciStop']
            scienceSkySubtraction = scienceReductionConfig[
                'scienceSkySubtraction']

    ###########################################################################
    ##                                                                       ##
    ##                 COMPLETE - GENERAL REDUCTION SETUP                    ##
    ##                                                                       ##
    ###########################################################################

    # nifsReduce has two nested loops that reduced data.
    # It loops through each science (or telluric) directory, and
    # runs through a series of calibrations steps on the data in that directory.

    # Loop through all the observation (telluric or science) directories to perform a reduction on each one.
    for observationDirectory in observationDirectoryList:

        ###########################################################################
        ##                                                                       ##
        ##                  BEGIN - OBSERVATION SPECIFIC SETUP                   ##
        ##                                                                       ##
        ###########################################################################

        # Print the current directory of data being reduced.
        logging.info(
            "\n#################################################################################"
        )
        logging.info("                                   ")
        logging.info("  Currently working on reductions in")
        logging.info("  in " + str(observationDirectory))
        logging.info("                                   ")
        logging.info(
            "#################################################################################\n"
        )

        os.chdir(observationDirectory)
        tempObs = observationDirectory.split(os.sep)
        obsid = tempObs[-1]

        # Change the iraf directory to the current directory.
        pwd = os.getcwd()
        iraffunctions.chdir(pwd)

        # Copy relevant calibrations over to the science directory.
        # Open and store the name of the MDF shift reference file from shiftfile into shift.
        shift = 'calibrations/shiftFile'
        # Open and store the name of the flat frame
        flat = 'calibrations/finalFlat'
        # Open and store the bad pixel mask
        finalBadPixelMask = 'calibrations/finalBadPixelMask'
        # Ronchi, arc and database must all be in local calibrations directory
        # Open and store the name of the reduced spatial correction ronchi flat frame name from ronchifile in ronchi.
        ronchi = 'finalRonchi'
        # Open and store the name of the reduced wavelength calibration arc frame from arclist in arc.
        arc = 'finalArc'

        if os.path.exists(os.getcwd() + '/' + ronchi + ".fits"):
            if over:
                iraf.delete(os.getcwd() + '/calibrations/finalRonchi.fits')
                # Copy the spatial calibration ronchi flat frame from Calibrations_grating to the observation directory.
                shutil.copy(os.getcwd() + '/calibrations/finalRonchi.fits',
                            ronchi + '.fits')
            else:
                print "\nOutput exists and -over not set - skipping copy of reduced ronchi"
        else:
            shutil.copy(os.getcwd() + '/calibrations/finalRonchi.fits',
                        ronchi + '.fits')

        if os.path.exists(os.getcwd() + '/' + arc + ".fits"):
            if over:
                iraf.delete(os.getcwd() + '/calibrations/finalArc.fits')
                # Copy the spatial calibration arc flat frame from Calibrations_grating to the observation directory.
                shutil.copy(os.getcwd() + '/calibrations/finalArc.fits',
                            arc + '.fits')
            else:
                print "\nOutput exists and -over not set - skipping copy of reduced arc"
        else:
            shutil.copy(os.getcwd() + '/calibrations/finalArc.fits',
                        arc + '.fits')
        # Make sure the database files are in place. Current understanding is that
        # these should be local to the reduction directory, so need to be copied from
        # the calDir.
        if os.path.isdir("./database"):
            if over:
                shutil.rmtree("./database")
                os.mkdir("./database")
                for item in glob.glob("calibrations/database/*"):
                    shutil.copy(item, "./database/")
            else:
                print "\nOutput exists and -over not set - skipping copy of database directory"
        else:
            os.mkdir('./database/')
            for item in glob.glob("calibrations/database/*"):
                shutil.copy(item, "./database/")

        if telluricSkySubtraction or scienceSkySubtraction:
            # Read the list of sky frames in the observation directory.
            try:
                skyFrameList = open("skyFrameList", "r").readlines()
                skyFrameList = [frame.strip() for frame in skyFrameList]
            except:
                logging.info(
                    "\n#####################################################################"
                )
                logging.info(
                    "#####################################################################"
                )
                logging.info("")
                logging.info(
                    "     WARNING in reduce: No sky frames were found in a directory."
                )
                logging.info("              Please make a skyFrameList in: " +
                             str(os.getcwd()))
                logging.info("")
                logging.info(
                    "#####################################################################"
                )
                logging.info(
                    "#####################################################################\n"
                )
                raise SystemExit
            sky = skyFrameList[0]

        # If we are doing a telluric reduction, open the list of telluric frames in the observation directory.
        # If we are doing a science reduction, open the list of science frames in the observation directory.
        # NOTE(review): file handles from open(...).readlines() are never closed here;
        # harmless for short scripts but a context manager would be cleaner.
        if kind == 'Telluric':
            tellist = open('tellist', 'r').readlines()
            tellist = [frame.strip() for frame in tellist]
        elif kind == 'Science':
            scienceFrameList = open("scienceFrameList", "r").readlines()
            scienceFrameList = [frame.strip() for frame in scienceFrameList]
            # For science frames, check to see if the number of sky frames matches the number of science frames.
            # IF NOT duplicate the sky frames and rewrite the sky file and skyFrameList.
            if scienceSkySubtraction:
                if not len(skyFrameList) == len(scienceFrameList):
                    skyFrameList = makeSkyList(skyFrameList, scienceFrameList,
                                               observationDirectory)

        ###########################################################################
        ##                                                                       ##
        ##                 COMPLETE - OBSERVATION SPECIFIC SETUP                 ##
        ##                BEGIN DATA REDUCTION FOR AN OBSERVATION                ##
        ##                                                                       ##
        ###########################################################################

        # Check start and stop values for reduction steps. Ask user for a correction if
        # input is not valid.
        # NOTE(review): the prompts below say "1 to 7" but the loop condition only
        # rejects stop > 6, and the step dispatcher below only implements steps 1-5.
        # Confirm the intended valid range -- the message, the guard, and the
        # implemented steps disagree.
        # NOTE(review): raw_input is Python 2 only; under Python 3 this would need input().
        valindex = start
        while valindex > stop or valindex < 1 or stop > 6:
            logging.info(
                "\n#####################################################################"
            )
            logging.info(
                "#####################################################################"
            )
            logging.info("")
            logging.info(
                "     WARNING in reduce: invalid start/stop values of observation"
            )
            logging.info("                           reduction steps.")
            logging.info("")
            logging.info(
                "#####################################################################"
            )
            logging.info(
                "#####################################################################\n"
            )

            # Re-prompt until the (start, stop) pair passes the guard above.
            # NOTE(review): non-numeric input will raise ValueError here -- no retry on bad parse.
            valindex = int(
                raw_input(
                    "\nPlease enter a valid start value (1 to 7, default 1): ")
            )
            stop = int(
                raw_input(
                    "\nPlease enter a valid stop value (1 to 7, default 7): "))

        # Main step dispatcher: run each reduction step from valindex through stop.
        while valindex <= stop:

            ###########################################################################
            ##  STEP 1: Prepare raw data; science, telluric and sky frames ->n       ##
            ###########################################################################

            if valindex == 1:
                if manualMode:
                    a = raw_input(
                        "About to enter step 1: locate the spectrum.")
                # prepare() returns the (possibly filtered) frame list with the "n" prefix applied.
                if kind == 'Telluric':
                    tellist = prepare(tellist, shift, finalBadPixelMask, log,
                                      over)
                elif kind == 'Science':
                    scienceFrameList = prepare(scienceFrameList, shift,
                                               finalBadPixelMask, log, over)
                # Sky frames are prepared whenever either flavour of sky subtraction is requested.
                if telluricSkySubtraction or scienceSkySubtraction:
                    skyFrameList = prepare(skyFrameList, shift,
                                           finalBadPixelMask, log, over)
                logging.info(
                    "\n##############################################################################"
                )
                logging.info("")
                logging.info(
                    "  STEP 1: Locate the Spectrum (and prepare raw data) ->n - COMPLETED "
                )
                logging.info("")
                logging.info(
                    "##############################################################################\n"
                )

            ###########################################################################
            ##  STEP 2: Sky Subtraction ->sn                                         ##
            ###########################################################################

            elif valindex == 2:
                if manualMode:
                    a = raw_input("About to enter step 2: sky subtraction.")
                # Combine telluric sky frames.
                if kind == 'Telluric':
                    if telluricSkySubtraction:
                        # Combine multiple sky frames into one, or just copy a single sky frame.
                        if len(skyFrameList) > 1:
                            combineImages(skyFrameList, "gn" + sky, log, over)
                        else:
                            copyImage(skyFrameList, 'gn' + sky + '.fits', over)
                        skySubtractTel(tellist, "gn" + sky, log, over)
                    else:
                        # No sky subtraction requested: pass frames through with the "sn" prefix
                        # so later steps find the expected filenames.
                        for image in tellist:
                            iraf.copy('n' + image + '.fits',
                                      'sn' + image + '.fits')

                if kind == 'Science':
                    if scienceSkySubtraction:
                        skySubtractObj(scienceFrameList, skyFrameList, log,
                                       over)
                    else:
                        # Same pass-through rename as the telluric branch above.
                        for image in scienceFrameList:
                            iraf.copy('n' + image + '.fits',
                                      'sn' + image + '.fits')

                logging.info(
                    "\n##############################################################################"
                )
                logging.info("")
                logging.info("  STEP 2: Sky Subtraction ->sn - COMPLETED ")
                logging.info("")
                logging.info(
                    "##############################################################################\n"
                )

            ##############################################################################
            ##  STEP 3: Flat field, slice, subtract dark and correct bad pixels ->brsn  ##
            ##############################################################################

            elif valindex == 3:
                if manualMode:
                    a = raw_input(
                        "About to enter step 3: flat fielding and bad pixels correction."
                    )
                # Flat-field then interpolate over bad pixels; adds the "br" prefixes.
                if kind == 'Telluric':
                    applyFlat(tellist, flat, log, over, kind)
                    fixBad(tellist, log, over)
                elif kind == 'Science':
                    applyFlat(scienceFrameList, flat, log, over, kind)
                    fixBad(scienceFrameList, log, over)
                logging.info(
                    "\n##############################################################################"
                )
                logging.info("")
                logging.info(
                    "  STEP 3: Flat fielding and Bad Pixels Correction ->brsn - COMPLETED "
                )
                logging.info("")
                logging.info(
                    "##############################################################################\n"
                )

            ###########################################################################
            ##  STEP 4: Derive and apply 2D to 3D transformation ->tfbrsn            ##
            ###########################################################################

            elif valindex == 4:
                if manualMode:
                    a = raw_input(
                        "About to enter step 4: 2D to 3D transformation and Wavelength Calibration."
                    )
                # Fit the spatial/spectral coordinate solution (arc + ronchi), then
                # apply it; adds the "tf" prefixes.
                if kind == 'Telluric':
                    fitCoords(tellist, arc, ronchi, log, over, kind)
                    transform(tellist, log, over)
                elif kind == 'Science':
                    fitCoords(scienceFrameList, arc, ronchi, log, over, kind)
                    transform(scienceFrameList, log, over)
                logging.info(
                    "\n##############################################################################"
                )
                logging.info("")
                logging.info(
                    "  STEP 4: 2D to 3D transformation and Wavelength Calibration ->tfbrsn - COMPLETED "
                )
                logging.info("")
                logging.info(
                    "##############################################################################\n"
                )

            ############################################################################
            ##  STEP 5 (tellurics): For telluric data derive a telluric               ##
            ##                     correction ->gxtfbrsn                              ##
            ##  STEP 5 (science): For science apply an efficiency correction and make ##
            ##           a data cube (not necessarily in that order).                 ##
            ##           (i) Python method applies correction to nftransformed cube.  ##
            ##           Good for faint objects.                        ->cptfbrsn    ##
            ##           (ii) iraf.telluric method applies correction to              ##
            ##           nftransformed result (not quite a data cube) then            ##
            ##           nftransforms cube.                             ->catfbrsn    ##
            ##           (iii) If no telluric correction/flux calibration to be       ##
            ##           applied make a plain data cube.                ->ctfbrsn     ##
            ############################################################################

            elif valindex == 5:
                if manualMode:
                    a = raw_input("About to enter step 5.")
                # For telluric data:
                # Make a combined extracted 1D standard star spectrum.
                if kind == 'Telluric':
                    extractOneD(tellist, kind, log, over, extractionXC,
                                extractionYC, extractionRadius)

                    # TODO(nat): add this as a parameter; encapsulate this.
                    copyToScience = True
                    if copyToScience:
                        # Copy final extracted results to science directory.
                        # scienceMatchedTellsList alternates "obs..." directory markers with
                        # science frame names; for each marker, copy the combined telluric
                        # spectrum next to every following frame until the next marker.
                        try:
                            with open("scienceMatchedTellsList", "r") as f:
                                lines = f.readlines()
                            lines = [x.strip() for x in lines]

                            for i in range(len(lines)):
                                if "obs" in lines[i]:
                                    k = 1
                                    while i + k != len(
                                            lines) and "obs" not in lines[i +
                                                                          k]:
                                        copyResultsToScience(
                                            "gxtfbrsn" + tellist[0] + ".fits",
                                            "0_tel" + lines[i + k] + ".fits",
                                            over)
                                        k += 1
                        except IOError:
                            # Best-effort: absence of the list just skips the copy.
                            logging.info(
                                "\nNo scienceMatchedTellsList found in " +
                                os.getcwd() +
                                " . Skipping copy of extracted spectra to science directory."
                            )

                    logging.info(
                        "\n##############################################################################"
                    )
                    logging.info("")
                    logging.info(
                        "  STEP 5a: Extract 1D Spectra and Make Combined 1D Standard Star Spectrum"
                    )
                    logging.info("           ->gxtfbrsn - COMPLETED")
                    logging.info("")
                    logging.info(
                        "##############################################################################\n"
                    )
                    #TODO(nat): add this as a parameter.
                    makeTelluricCube = True
                    if makeTelluricCube:
                        makeCube('tfbrsn', tellist, log, over)
                        logging.info(
                            "\n##############################################################################"
                        )
                        logging.info("")
                        logging.info(
                            "  STEP 5b: Make uncorrected standard star data cubes, ->ctfbrsn  - COMPLETED"
                        )
                        logging.info("")
                        logging.info(
                            "##############################################################################\n"
                        )

                # For Science data:
                # Possibly extract 1D spectra, and make uncorrected cubes.
                elif kind == 'Science':
                    if scienceOneDExtraction:
                        extractOneD(scienceFrameList, kind, log, over,
                                    extractionXC, extractionYC,
                                    extractionRadius)
                        copyExtracted(scienceFrameList, over)
                        logging.info(
                            "\n##############################################################################"
                        )
                        logging.info("")
                        logging.info(
                            "  STEP 5a: Make extracted 1D Science spectra, ->ctgbrsn  - COMPLETED"
                        )
                        logging.info("")
                        logging.info(
                            "##############################################################################\n"
                        )
                    makeCube('tfbrsn', scienceFrameList, log, over)

                    # TODO(nat): encapsulate this inside a function.
                    # Stage the uncorrected cubes into products_uncorrected/,
                    # recreating the directory when -over is set.
                    if os.path.exists('products_uncorrected'):
                        if over:
                            shutil.rmtree('products_uncorrected')
                            os.mkdir('products_uncorrected')
                        else:
                            logging.info(
                                "\nOutput exists and -over not set - skipping creating of products_uncorrected directory"
                            )
                    else:
                        os.mkdir('products_uncorrected')
                    for item in scienceFrameList:
                        if os.path.exists('products_uncorrected/ctfbrsn' +
                                          item + '.fits'):
                            if over:
                                os.remove('products_uncorrected/ctfbrsn' +
                                          item + '.fits')
                                shutil.copy(
                                    'ctfbrsn' + item + '.fits',
                                    'products_uncorrected/ctfbrsn' + item +
                                    '.fits')
                            else:
                                logging.info(
                                    "\nOutput exists and -over not set - skipping copy of uncorrected cube"
                                )
                        else:
                            shutil.copy(
                                'ctfbrsn' + item + '.fits',
                                'products_uncorrected/ctfbrsn' + item +
                                '.fits')

                    # Seed products_telluric_corrected/ with the same (still uncorrected)
                    # cubes. NOTE(review): presumably a later stage overwrites these with
                    # telluric-corrected versions -- confirm against the rest of the pipeline.
                    if os.path.exists('products_telluric_corrected'):
                        if over:
                            shutil.rmtree('products_telluric_corrected')
                            os.mkdir('products_telluric_corrected')
                        else:
                            logging.info(
                                "\nOutput exists and -over not set - skipping creating of products_telluric_corrected directory"
                            )
                    else:
                        os.mkdir('products_telluric_corrected')
                    for item in scienceFrameList:
                        if os.path.exists(
                                'products_telluric_corrected/ctfbrsn' + item +
                                '.fits'):
                            if over:
                                os.remove(
                                    'products_telluric_corrected/ctfbrsn' +
                                    item + '.fits')
                                shutil.copy(
                                    'ctfbrsn' + item + '.fits',
                                    'products_telluric_corrected/ctfbrsn' +
                                    item + '.fits')
                            else:
                                logging.info(
                                    "\nOutput exists and -over not set - skipping copy of uncorrected cube"
                                )
                        else:
                            shutil.copy(
                                'ctfbrsn' + item + '.fits',
                                'products_telluric_corrected/ctfbrsn' + item +
                                '.fits')

                    logging.info(
                        "\n##############################################################################"
                    )
                    logging.info("")
                    logging.info(
                        "  STEP 5b: Make uncorrected science data cubes, ->ctfbrsn  - COMPLETED"
                    )
                    logging.info("")
                    logging.info(
                        "##############################################################################\n"
                    )

            # NOTE(review): valindex values 6+ fall through all branches silently,
            # even though the guard above permits stop up to 6.
            valindex += 1

        logging.info(
            "\n##############################################################################"
        )
        logging.info("")
        logging.info("  COMPLETE - Reductions completed for " +
                     str(observationDirectory))
        logging.info("")
        logging.info(
            "##############################################################################\n"
        )

    # Return to directory script was begun from.
    os.chdir(path)
# 示例#27
# 0
def main():
    """Drive a Gemini GSAOI imaging reduction with PyRAF.

    Backs up raw S2*.fits frames, builds a flat field (gaflat), reduces each
    object (gareduce, with gasky sky frames when on/off groups are present),
    cleans amplifiers, runs disco_stu to mosaic/project, stacks the projected
    images per object, and removes temporary files.

    Relies on helpers defined elsewhere in this file (cleanup, copy_raw,
    unpack_files, sort_files, make_inp, group_on_off, clean_amps,
    run_disco_stu, sanitize_objname, stack_files).
    """

    # Back up raw frames -- but only when raw/ is first created.
    # NOTE(review): if raw/ already exists, any new S2*.fits are NOT backed up here.
    if not os.path.exists('raw/'):
        os.makedirs('raw/')
        for file in glob.glob('S2*.fits'):
            shutil.copyfile(file, 'raw/'+file)

    cleanup()
    copy_raw()

    # Initialize the IRAF login/parameter files, then load the Gemini packages.
    os.system('mkiraf -f')
    from pyraf import iraf
    iraf.gemini(_doprint=0)
    iraf.gsaoi(_doprint=0)

    # Check if we need to unarchive and grab files
    if len(glob.glob('*.bz2'))>0:
        unpack_files()

        files = glob.glob('S2*.fits')
        if not os.path.exists('raw/'):
            os.makedirs('raw/')

        for file in files:
            shutil.copyfile(file, 'raw/'+file)


    files = glob.glob('S2*.fits')
    if len(files)==0:
        print('WARNING: no fits files to reduce!!!')
        print('Exiting...')
        sys.exit()

    # sort_files() returns a table with at least 'imagetyp', 'object', 'filename' columns.
    allfiles = sort_files(files)

    # Build the flat field only if one does not already exist.
    if len(glob.glob('*_flat.fits'))==0:
        mask = allfiles['imagetyp']=='FLAT'
        iraf.gaflat(make_inp(allfiles[mask]['filename']),
            fl_vardq=True, fl_dqprop=True, use_off='yes')
    # NOTE(review): IndexError here if gaflat produced no *_flat.fits.
    flatimg = glob.glob('*_flat.fits')[0]

    # Try parsing objects into on and off groups
    objtable = allfiles[allfiles['imagetyp']=='OBJECT']
    objs = np.unique(objtable['object'])
    # NOTE(review): objects whose name contains '91' are skipped -- presumably
    # standard-star fields; confirm the intent of this filter.
    objs = [obj for obj in objs if ('91' not in obj)]

    for obj in objs:

        # Each group is a (label, filelist) pair with label 'on' or 'off'.
        groups = group_on_off(allfiles, obj, sigoffset=2, maxoffset=15.)

        # Assume we're alternating on/off
        # Check how many pairs we have
        num_group_pairs = len(groups)

        if num_group_pairs==1:
            # Assume not grouped together by on-off pairs and all just on source
            all_files = groups[0][1]
            reduce_input = make_inp(all_files)
            print(reduce_input)
            # Flat-field only; no sky subtraction possible without off frames.
            iraf.gareduce(reduce_input, fl_vardq=True, fl_dark=False,
                flatimg=flatimg, fl_sky=False, fl_dqprop=True, fl_flat=True)

            all_obj = make_inp(all_files, suffix='rg')
            print(all_obj)

        elif num_group_pairs>1:

            all_obj=''
            # Iterate through list by groups of 2
            for i,group in enumerate(groups):
                if group[0]=='off':
                    continue

                # Pair each 'on' group with the neighbouring 'off' group
                # (the previous one when 'on' is last in the list).
                on_group = group
                if i==len(groups)-1:
                    off_group = groups[i-1]
                else:
                    off_group = groups[i+1]

                # Sky reduction
                iraf.gaprepare(make_inp(off_group[1]), fl_vardq=True)
                iraf.gasky(make_inp(off_group[1], suffix='g'),
                    outimage='sky{0}.fits'.format(i), fl_vardq=True,
                    fl_dqprop=True, flat=flatimg)

                # Now object reduction
                iraf.gareduce(make_inp(on_group[1]), fl_vardq=True,
                    fl_dark=False, flatimg=flatimg, fl_sky=True, fl_dqprop=True,
                    fl_flat=True, skyimg='sky{0}.fits'.format(i))

                all_obj = make_inp(on_group[1], suffix='rg', add=all_obj)

        # Run disco_stu on mosaic files
        # NOTE(review): if groups is empty (num_group_pairs==0), all_obj is
        # unbound here and this raises NameError -- confirm that case cannot occur.
        all_files = [f+'.fits' for f in all_obj.split(',')]

        # Clean the amplifiers for all files
        print('Cleaning images amplifier by amplifier')
        for i,file in enumerate(all_files):
            print('file {0}/{1}:'.format(i+1,len(all_files)),file)
            clean_amps(file)

        # Now run disco_stu on everything
        run_disco_stu(' '.join(all_files))

        # Get re-projected images
        proj = glob.glob('*_proj.fits')

        if len(proj)>0:
            objname = sanitize_objname(obj)
            stack_files(proj, objname)
        else:
            print('WARNING: no projected files to stack!')

        # Clean up remaining files
        # NOTE(review): rmtree raises if uparm/ or pyraf/ is absent, e.g. on the
        # second loop iteration -- consider shutil.rmtree(..., ignore_errors=True).
        for file in glob.glob('tmp*'):
            os.remove(file)
        for file in glob.glob('*.log'):
            os.remove(file)
        shutil.rmtree('uparm')
        shutil.rmtree('pyraf')