def set_default(base):
    # Loading necessary IRAF packages
    iraf.digiphot(_doprint=0)
    iraf.daophot(_doprint=0)
    iraf.apphot(_doprint=0)
    iraf.reset(min_lenuserarea='200000')
    iraf.datapars.setParam('datamin', 'INDEF')
    iraf.datapars.setParam('datamax', '60000')
    iraf.datapars.setParam('exposure', 'EXPTIME')
    iraf.datapars.setParam('airmass', 'AIRMASS')
    iraf.datapars.setParam('filter', 'FILTER')
    iraf.findpars.setParam('threshold', 3.0)
    iraf.findpars.setParam('sharphi', 1)
    iraf.findpars.setParam('roundhi', 1.3)
    iraf.findpars.setParam('roundlo', -1.3)
    iraf.daofind.setParam('verify', 'no')
    iraf.daofind.setParam('interactive', 'no')
    iraf.photpars.setParam('zmag', 25.0)
    iraf.photpars.setParam('weighti', 'constant')
    iraf.photpars.setParam('apertur', 3.0)
    iraf.phot.setParam('output', base + 'default')
    iraf.phot.setParam('coords', base + 'default')
    iraf.phot.setParam('verify', 'no')
    iraf.phot.setParam('interactive', 'no')
    iraf.fitpsf.setParam('box', 10.0)
    iraf.fitpsf.setParam('verify', 'no')
    iraf.fitpsf.setParam('interactive', 'no')
    iraf.centerpars.setParam('calgori', 'none')
    iraf.fitskypars.setParam('salgorithm', 'mode')
    iraf.daopars.setParam('functio', 'moffat15')
    iraf.daopars.setParam('varorde', '0')
    iraf.daopars.setParam('nclean', '0')
    iraf.daopars.setParam('saturat', 'no')
    iraf.daopars.setParam('fitsky', 'yes')
    iraf.daopars.setParam('recenter', 'yes')
    iraf.daopars.setParam('groupsk', 'yes')
    iraf.daopars.setParam('maxnsta', '40000')
    iraf.psf.setParam('photfile', base + 'default')
    iraf.psf.setParam('pstfile', base + 'default')
    iraf.psf.setParam('psfimage', base + 'default')
    iraf.psf.setParam('opstfile', base + 'default')
    iraf.psf.setParam('groupfil', base + 'default')
    iraf.psf.setParam('interac', 'no')
    iraf.psf.setParam('matchby', 'yes')
    iraf.psf.setParam('verify', 'no')
    iraf.psf.setParam('showplo', 'no')
    iraf.allstar.setParam('verify', 'no')
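# Hypothetical usage sketch (not part of the original module): once
# set_default() has configured the DAOPHOT parameter sets, source detection
# can be run with those settings. The image name, "base" prefix and output
# coordinate file name below are placeholders, not taken from the source.
if __name__ == '__main__':
    base = 'field1_'
    set_default(base)
    iraf.daofind('field1.fits', output=base + 'field1.coo', verify='no')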
def barycor(filelist_new):
    iraf.reset(obsdb='home$obsdb.dat')
    for i in range(len(filelist_new)):
        hdulist = fits.open(filelist_new[i])
        header_time_of_observation = hdulist[0].header['DATE-OBS']
        year_of_observation = int(header_time_of_observation[:4])
        month_of_observation = int(header_time_of_observation[5:7])
        day_of_observation = int(header_time_of_observation[8:10])
        right_ascension = hdulist[0].header['RA']
        declination = hdulist[0].header['DEC']

        try:
            ut_of_observation = hdulist[0].header['UT']
        except KeyError:
            ut_of_observation = int(header_time_of_observation[11:13]) + int(
                header_time_of_observation[14:16]) / 60 + int(
                    header_time_of_observation[17:19]) / 3600
        exposure_time = hdulist[0].header['EXP_TIME']

        output_filename_dummy = filelist_new[i].replace("norm.", "norm.dummy.")
        output_filename_dummyI = output_filename_dummy.replace(
            "norm-1", "norm-1.dummy.")
        output_filename = output_filename_dummyI.replace(
            "merged", "merged.dummy.")
        iraf.scopy(filelist_new[i], output_filename)

        iraf.hedit(images=output_filename, fields="UT",
                   value=ut_of_observation)
        iraf.hedit(images=output_filename, fields="EPOCH", value="2000")
        iraf.hedit(images=output_filename, fields="EXP-TIME",
                   value=exposure_time)

        iraf.rvcorrect(images=output_filename,
                       year=year_of_observation,
                       month=month_of_observation,
                       day=day_of_observation,
                       ut=ut_of_observation,
                       ra=right_ascension,
                       dec=declination)

        output_filename_final = output_filename.replace("dummy.", "rvcorrect.")
        print(output_filename, output_filename_final)
        iraf.dopcor(output_filename, output_filename_final,
                    redshift="-VHELIO", isvelocity="yes")
        os.remove(output_filename)
        hdulist.close()
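# Hypothetical usage sketch (not part of the original module): barycor()
# expects spectrum files whose headers carry DATE-OBS, RA, DEC and EXP_TIME,
# and it assumes the module-level imports it relies on (pyraf.iraf,
# astropy.io.fits as fits, os) are in place. The glob pattern is illustrative.
if __name__ == '__main__':
    import glob
    barycor(sorted(glob.glob('*norm*.fits')))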
def set_observatory(obsid):
    """
    obsid -- Examine and set observatory parameters

    Custom obsdb.dat is at
    /home/lim9/anaconda3/lib/python3.7/site-packages/lgpy/SAO_KL400
    """
    import glob
    import os, sys
    from pyraf import iraf
    iraf.noao()
    iraf.reset(
        obsdb='/home/lim9/anaconda3/lib/python3.7/site-packages/lgpy/SAO_KL400/obsdb.dat'
    )
    iraf.observatory(command="set", obsid=obsid)
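# Hypothetical usage sketch (not part of the original module): any obsid
# present in the custom obsdb.dat (or in IRAF's built-in observatory database)
# can be passed; 'sao' below is only a placeholder.
if __name__ == '__main__':
    set_observatory('sao')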
def __init__(self, shortparlists, parlists, FitsDir, logfile, verbose=1,
             clean_up=1, skyKey='ALIGNSKY', hdrGain=0, crlower=None,
             imNsci=1):
    self.modName = string.split(string.split(str(self))[0], '.')[0][1:]
    self.shortparlists = shortparlists
    self.parlists = parlists
    self.Fits = FitsDir
    self.verbose = verbose
    self.crmasks = {}          # cosmic ray masks names
    self.removeList = []
    self.clean_up = clean_up
    self.skyKey = skyKey
    self.hdrGain = hdrGain
    self.crlower = crlower
    if imNsci < 1:
        raise ValueError, 'Error: pyblot got imNsci = ' + str(imNsci)
    self.imNsci = imNsci
    self.logfile = logfile
    print self.modName, 'version', __version__
    self.logfile.write('Instantiating ' + self.modName + ' version ' + __version__)
    # make sure these packages are loaded
    iraf.stsdas()
    iraf.toolbox()
    iraf.imgtool()
    iraf.fourier()
    iraf.fitting()
    iraf.ttools()
    iraf.analysis()
    iraf.dither()
    # flush the cache! twice!
    iraf.flpr()
    iraf.flpr()
    iraf.reset(imtype='fits')  # seems to make the deriv task a bit happier
    iraf.set(tmp='./')
def start(): """ nifsBaselineCalibration This module contains all the functions needed to reduce NIFS GENERAL BASELINE CALIBRATIONS INPUT FILES FOR EACH BASELINE CALIBRATION: Raw files: - Flat frames (lamps on) - Flat frames (lamps off) - Arc frames - Arc dark frames - Ronchi mask flat frames OUTPUT FILES: - Shift file. Eg: sCALFLAT.fits - Bad Pixel Mask. Eg: rgnCALFLAT_sflat_bmp.pl - Flat field. Eg: rgnCALFLAT_flat.fits - Reduced arc frame. Eg: wrgnARC.fits - Reduced ronchi mask. Eg: rgnRONCHI.fits - Reduced dark frame. Eg: rgnARCDARK.fits Args: # Loaded from runtimeData/config.cfg calDirList: list of paths to calibrations. ['path/obj/date/Calibrations_grating'] over (boolean): overwrite old files. Default: False. start (int): starting step of daycal reduction. Specified at command line with -a. Default: 1. stop (int): stopping step of daycal reduction. Specified at command line with -z. Default: 6. debug (boolean): enable optional debugging pauses. Default: False. """ # TODO(nat): stop using first frame from list as name for combined frames. Find better names and implement # them in pipeline and docs. # TODO(nat): Finish converting the print statements to logging.info() statements. # Store current working directory for later use. path = os.getcwd() # Set up the logging file. log = os.getcwd()+'/Nifty.log' logging.info('#################################################') logging.info('# #') logging.info('# Start the NIFS Baseline Calibration Reduction #') logging.info('# #') logging.info('#################################################') # Set up/prepare IRAF. iraf.gemini() iraf.nifs() iraf.gnirs() iraf.gemtools() # Reset to default parameters the used IRAF tasks. iraf.unlearn(iraf.gemini,iraf.gemtools,iraf.gnirs,iraf.nifs) # From http://bishop.astro.pomona.edu/Penprase/webdocuments/iraf/beg/beg-image.html: # Before doing anything involving image display the environment variable # stdimage must be set to the correct frame buffer size for the display # servers (as described in the dev$graphcap file under the section "STDIMAGE # devices") or to the correct image display device. The task GDEVICES is # helpful for determining this information for the display servers. iraf.set(stdimage='imt2048') # Prepare the IRAF package for NIFS. # NSHEADERS lists the header parameters used by the various tasks in the # NIFS package (excluding headers values which have values fixed by IRAF or # FITS conventions). iraf.nsheaders("nifs",logfile=log) # Set clobber to 'yes' for the script. This still does not make the gemini # tasks overwrite files, so: # YOU WILL LIKELY HAVE TO REMOVE FILES IF YOU RE_RUN THE SCRIPT. user_clobber=iraf.envget("clobber") iraf.reset(clobber='yes') # Load reduction parameters from ./config.cfg. with open('./config.cfg') as config_file: options = ConfigObj(config_file, unrepr=True) calDirList = options['calibrationDirectoryList'] over = options['over'] start = options['rstart'] stop = options['rstop'] debug = options['debug'] ################################################################################ # Define Variables, Reduction Lists AND identify/run number of reduction steps # ################################################################################ # Loop over the Calibrations directories and reduce the day calibrations in each one. for calpath in calDirList: os.chdir(calpath) pwdDir = os.getcwd()+"/" iraffunctions.chdir(pwdDir) # However, don't do the reduction for a Calibration_"grating" directory without associated telluric or science data. 
# Check that a "grating" directory exists at the same level as the Calibrations_"grating" directory. # If not, skip the reduction of calibrations in that Calibrations_grating directory. # "grating" should be the last letter of calpath. grating = calpath[-1] if not os.path.exists("../"+grating): print "\n##############################################################################" print "" print " No grating directory (including science or telluric data) found for " print " ", calpath print " Skipping reduction of calibrations in that directory." print "" print "##############################################################################\n" continue # Create lists of each type of calibration from textfiles in Calibrations directory. flatlist = open('flatlist', "r").readlines() flatdarklist = open("flatdarklist", "r").readlines() arcdarklist = open("arcdarklist", "r").readlines() arclist = open("arclist", "r").readlines() ronchilist = open("ronchilist", "r").readlines() # Store the name of the first image of each calibration-type-list in # a variable for later use (Eg: calflat). This is because gemcombine will # merge a list of files (Eg: "n"+flatlist) and the output file will have the same # name as the first file in the list (Eg: calflat). These first file names are used # later in the pipeline. calflat = (flatlist[0].strip()).rstrip('.fits') flatdark = (flatdarklist[0].strip()).rstrip('.fits') arcdark = (arcdarklist[0].strip()).rstrip('.fits') arc = (arclist[0].strip()).rstrip('.fits') ronchiflat = (ronchilist[0].strip()).rstrip('.fits') # Check start and stop values for reduction steps. Ask user for a correction if # input is not valid. valindex = start while valindex > stop or valindex < 1 or stop > 4: print "\n#####################################################################" print "#####################################################################" print "" print " WARNING in calibrate: invalid start/stop values of calibration " print " reduction steps." print "" print "#####################################################################" print "#####################################################################\n" valindex = int(raw_input("\nPlease enter a valid start value (1 to 4, default 1): ")) stop = int(raw_input("\nPlease enter a valid stop value (1 to 4, default 4): ")) # Print the current directory of calibrations being processed. print "\n#################################################################################" print " " print " Currently working on calibrations " print " in ", calpath print " " print "#################################################################################\n" while valindex <= stop: ############################################################################# ## STEP 1: Determine the shift to the MDF (mask definition file) ## ## using nfprepare (nsoffset). Ie: locate the spectra. ## ## Output: First image in flatlist with "s" prefix. 
## ############################################################################# if valindex == 1: if debug: a = raw_input("About to enter step 1: locate the spectrum.") getShift(calflat, over, log) print "\n###################################################################" print "" print " STEP 1: Locate the Spectrum (Determine the shift to the MDF) - COMPLETED" print "" print "###################################################################\n" ############################################################################# ## STEP 2: Create Flat Field frame and BPM (Bad Pixel Mask) ## ## Output: Flat Field image with spatial and spectral information. ## ## First image in flatlist with "rgn" prefix and "_flat" suffix. ## ############################################################################# elif valindex == 2: if debug: a = raw_input("About to enter step 2: flat field.") makeFlat(flatlist, flatdarklist, calflat, flatdark, over, log) print "\n###################################################################" print "" print " STEP 2: Flat Field (Create Flat Field image and BPM image) - COMPLETED " print "" print "###################################################################\n" ############################################################################ ## STEP 3: NFPREPARE and Combine arc darks. ## ## NFPREPARE, Combine and flat field arcs. ## ## Determine the wavelength solution and create the wavelength ## ## referenced arc. ## ############################################################################ elif valindex == 3: if debug: a = raw_input("About to enter step 3: wavelength solution.") reduceArc(arclist, arc, arcdarklist, arcdark, log, over) wavecal(arc, log, over, path) print "\n###################################################################" print "" print " STEP 3: Wavelength Solution (NFPREPARE and Combine arc darks. " print " NFPREPARE, Combine and flat field arcs." print " Determine the wavelength solution and create the" print " wavelength referenced arc) - COMPLETED" print "" print "###################################################################\n" ###################################################################################### ## Step 4: Trace the spatial curvature and spectral distortion in the Ronchi flat. ## ###################################################################################### elif valindex == 4: if debug: a = raw_input("About to enter step 4: spatial distortion.") ronchi(ronchilist, ronchiflat, calflat, over, flatdark, log) print "\n###################################################################" print "" print " Step 4: Spatial Distortion (Trace the spatial curvature and spectral distortion " print " in the Ronchi flat) - COMPLETED" print "" print "###################################################################\n" else: print "\nERROR in nifs_baseline_calibration: step ", valindex, " is not valid.\n" raise SystemExit valindex += 1 print "\n##############################################################################" print "" print " COMPLETE - Calibration reductions completed for " print " ", calpath print "" print "##############################################################################\n" # Return to directory script was begun from. os.chdir(path) return
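# Hypothetical usage sketch (not in the original source): start() takes no
# arguments here and reads calibrationDirectoryList, over, rstart, rstop and
# debug from ./config.cfg, so a minimal driver is simply:
#
#     if __name__ == '__main__':
#         start()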
def reduceDark(dListFn, dMasterFn, mBiasFn):
    if os.path.isfile(dMasterFn + ".fits"):
        return

    zerocor = "no"
    if os.path.isfile(mBiasFn + ".fits"):
        zerocor = "yes"

    print("File list: @" + dListFn)
    print("dMasterFn: " + dMasterFn)

    iraf.reset(use_new_imt="no")
    iraf.ccdproc(
        "@" + dListFn, output="@" + dListFn + "//atlas_b", ccdtype=" ",
        max_cache=0, noproc="no", fixpix="no", overscan="no", trim="no",
        zerocor=zerocor, darkcor="no", flatcor="no", illumcor="no",
        fringecor="no", readcor="no", scancor="no", readaxis="line",
        fixfile="", biassec="", trimsec="", zero=mBiasFn, dark=" ", flat="",
        illum="", fringe="", minreplace=1.0, scantype="shortscan", nscan=1,
        interactive="no", function="legendre", order=1, sample="*",
        naverage=1, niterate=1, low_reject=3.0, high_reject=3.0, grow=0.0,
    )
    iraf.imcombine(
        "@" + dListFn + "//atlas_b", output=dMasterFn, headers="", bpmasks="",
        rejmasks="", nrejmasks="", expmasks="", sigmas="", imcmb="$I",
        logfile=dMasterFn + ".log", combine="median", reject="crreject",
        project="no", outtype="real", outlimits="", offsets="none",
        masktype="none", maskvalue="0", blank=0.0, scale="exposure",
        zero="none", weight="none", statsec="", expname="",
        lthreshold="INDEF", hthreshold="INDEF", nlow=1, nhigh=1, nkeep=1,
        mclip="yes", lsigma=3.0, hsigma=3.0, rdnoise="0.", gain="1.",
        snoise="0.", sigscale=0.1, pclip=-0.5, grow=0.0,
    )
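# Hypothetical usage sketch (not in the original source): reduceDark() reads an
# IRAF @-style file list, bias-corrects the frames with ccdproc when a master
# bias exists, and median-combines them into a master dark. The file names
# below are placeholders:
#
#     reduceDark("darks.lst", "master_dark", "master_bias")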
function="legendre", order=1, sample="*", naverage=1, niterate=1, low_reject=3.0, high_reject=3.0, grow=0.0, ) if __name__ == "__main__": logging.basicConfig(level=logging.DEBUG, format="%(asctime)s %(name)s %(levelname)s: %(message)s") logger = logging.getLogger(__name__) iraf.reset(use_new_imt="no") iraf.imred() iraf.ccdred() root = sys.argv[1] atlas0 = root + "atlas_1/" logger.info("Root: " + sys.argv[1]) os.chdir(root) fitsExt = ".fits" bListRgx = ".biass*" dListRgx = ".darks*" fListRgx = ".flats*" dataFolder = "atlas_2/" if not os.path.exists(dataFolder):
""" Display Gattini images using the program C{ds9}. """ import os import time from pyraf import iraf iraf.reset(stdimage="imt1600") from processing.new_ops import produce_flat, get_cam_flat_filename def _prep_ds9(): """ Open up an instance of C{ds9} if one is not already running. """ if len(os.popen("ps aux | grep ds9 | grep $USER | grep -v grep").readlines()) == 0: os.system("ds9 &") time.sleep(3) def display(outfile, frame=1): """ Display a given file, using an optional frame number. """ iraf.display(outfile, frame) def single_display(image_id, frame=1): """ Display the flat field reduced image for a given image ID (as defined in the GDB). """ _prep_ds9()
def run(): """ Merge final cubes. """ # Store current working directory for later use. path = os.getcwd() # Set up iraf iraf.gemini() iraf.nifs() iraf.gnirs() iraf.gemtools() # Unlearn the used tasks. iraf.unlearn(iraf.gemini,iraf.gemtools,iraf.gnirs,iraf.nifs) # Prepare the package for NIFS iraf.nsheaders("nifs",logfile="Nifty.log") iraf.set(stdimage='imt2048') user_clobber=iraf.envget("clobber") iraf.reset(clobber='yes') # Set up the logging file. log = os.getcwd()+'/Nifty.log' logging.info('\n#################################################') logging.info('# #') logging.info('# Start the NIFS Final Cube Merging #') logging.info('# #') logging.info('#################################################\n') # Load reduction parameters from ./config.cfg. with open('./config.cfg') as config_file: config = ConfigObj(config_file, unrepr=True) # Read general pipeline config. manualMode = config['manualMode'] over = config['over'] scienceDirectoryList = config['scienceDirectoryList'] # Read baselineCalibrationReduction specfic config. mergeConfig = config['mergeConfig'] start = mergeConfig['mergeStart'] stop = mergeConfig['mergeStop'] mergeType = mergeConfig['mergeType'] use_pq_offsets = mergeConfig['use_pq_offsets'] im3dtran = mergeConfig['im3dtran'] valindex = start while valindex <= stop: # There are three types of merging to choose from. You can: if valindex == 1: # Merge uncorrected cubes. These have the "ctfbrsn" prefix. mergeCubes(scienceDirectoryList, "uncorrected", mergeType, use_pq_offsets, im3dtran, over) logging.info("\n##############################################################################") logging.info("") logging.info(" STEP 1 - Merge Uncorrected Individual Observations - COMPLETED ") logging.info("") logging.info("##############################################################################\n") if valindex == 2: # Merge merged cubes from each observation. finalMergeCubes(mergeType, over) logging.info("\n##############################################################################") logging.info("") logging.info(" STEP 2 - Merge Uncorrected Merged Observation Cubes - COMPLETED ") logging.info("") logging.info("##############################################################################\n") if valindex == 3: # Merge telluric corrected cubes. These have the "actfbrsn" prefix. mergeCubes(scienceDirectoryList, "telluricCorrected", mergeType, use_pq_offsets, im3dtran, over) logging.info("\n##############################################################################") logging.info("") logging.info(" STEP 3 - Merge Telluric Corrected Individual Observations - COMPLETED ") logging.info("") logging.info("##############################################################################\n") if valindex == 4: # Merge merged cubes from each observation. finalMergeCubes(mergeType, over) logging.info("\n##############################################################################") logging.info("") logging.info(" STEP 4 - Merge Telluric Corrected Merged Observation Cubes - COMPLETED ") logging.info("") logging.info("##############################################################################\n") if valindex == 5: # Merge telluric corrected AND flux calibrated cubes. These have the "factfbrsn" prefix. 
mergeCubes(scienceDirectoryList, "telCorAndFluxCalibrated", mergeType, use_pq_offsets, im3dtran, over) logging.info("\n##############################################################################") logging.info("") logging.info(" STEP 5 - Merge Telluric Corrected and Flux Calibrated Cubes - COMPLETED ") logging.info("") logging.info("##############################################################################\n") if valindex == 6: # Merge merged cubes from each observation. finalMergeCubes(mergeType, over) logging.info("\n##############################################################################") logging.info("") logging.info(" STEP 6 - Merge Telluric Corrected AND Flux Calibrated Cubes - COMPLETED ") logging.info("") logging.info("##############################################################################\n") valindex += 1
def calcoffset(inlist, inpref='', trace=True, review=False): # open input list and check if it exists inimg_arr = check_input(inlist, inpref) if isinstance(inimg_arr,int): return 1 # check output geomap file gmp_arr = [] for i in range(len(inimg_arr)): # geomap input file name fname,ext = os.path.splitext(inimg_arr[i]) gmp_arr.append(fname + '.gmp') if os.access(gmp_arr[i], os.R_OK): print >> sys.stderr, 'operation would overwrite existing file (%s)' % gmp_arr[i] return 1 # open ds9 d = ds9() d.set('regions delete all') # unlearn IRAF commands iraf.reset(stdimage='imt1024') iraf.unlearn('display') iraf.unlearn('rimexam') iraf.unlearn('imexam') iraf.unlearn('hedit') # review rimexam parameters if review: iraf.epar('rimexam') # prefix for temporary file tmp = tempfile.NamedTemporaryFile(suffix='', prefix='', dir='/tmp') tmp_prefix = tmp.name tmp.close() # save imexam parameters into temporary files tmp_par = tmp_prefix + '.par' iraf.rimexam.saveParList(filename=tmp_par) # region file for the first image tmp0_reg = tmp_prefix+'_0.reg' # measure position i = 0 k = 0 xarr = [] yarr = [] peak_arr = [] fwhm_arr = [] gsx0 = 0.0 gsy0 = 0.0 nobj = 1 trace0 = trace for i in range(len(inimg_arr)): # initialize trace parameter trace = trace0 # get dithering position from header im = pyfits.open(inimg_arr[i], mode='update') try: ao_mode = im[0].header['D_MODE'] ao_loop = im[0].header['D_LOOP'] except KeyError: trace = False # read guide star coordinates gsx = 0.0 gsy = 0.0 if ao_loop.lower() == 'on': if ao_mode.lower().find('lgs') == -1: try: gsx = float(im[0].header['D_AU1GSX']) gsy = float(im[0].header['D_AU1GSY']) except KeyError: trace = False else: try: gsx = float(im[0].header['D_AU2GSX']) gsy = float(im[0].header['D_AU2GSY']) except KeyError: trace = False else: trace = False if k == 0: gsx0 = gsx gsy0 = gsy # load rimexam parameters iraf.rimexam.setParList(ParList=tmp_par) # display image d.set('regions delete all') iraf.display(inimg_arr[i], 1) # automatic object pickup calc_ng = 1 if k >0 and trace == True: xobj = [] yobj = [] peak = [] fwhm = [] # guess object position tmp_reg = tmp_prefix+'.reg' if os.access(tmp_reg, os.R_OK): os.remove(tmp_reg) freg = open(tmp_reg, 'w') for ii in range(nobj): xg = xref[ii] + (gsx - gsx0) yg = yref[ii] + (gsy - gsy0) freg.write('image; circle %.3f %.3f 10 # color=blue text={%d}\n' % (xg, yg, (ii+1))) freg.close() d.set('regions load %s' % tmp_reg) # check object position guess OutOfRange = 0 for ii in range(nobj): xg = xref[ii] + (gsx - gsx0) yg = yref[ii] + (gsy - gsy0) if xg <= 0 or xg >= 1024 or yg <= 0 or yg >= 1024: OutOfRange = 1 # pickup objects if OutOfRange == 0: if os.access(tmp_reg, os.R_OK): os.remove(tmp_reg) freg = open(tmp_reg, 'w') for ii in range(nobj): xg = xref[ii] + (gsx - gsx0) yg = yref[ii] + (gsy - gsy0) tmp_coo = tmp_prefix+'_coo.dat' if os.access(tmp_coo, os.R_OK): os.remove(tmp_coo) fcoo = open(tmp_coo, 'w') fcoo.write('%.3f %.3f a\n' % (xg, yg)) fcoo.close() imexam_ng = 1 auto_skip = 0 while imexam_ng == 1: try: ret = iraf.imexam(inimg_arr[i],1,imagecu=tmp_coo, use_dis='no', Stdout=1) imexam_ng = 0 auto_skip = 0 except: print '\nIRAF imexam failed for object No. %d' % (ii+1) print 'Try again by changing rimexam parameters' print 'Hit return to enter rimexam parameter setting window' print 'Type \'q\' to skip this object.' 
check = '' while check.lower() != 'q' and check.lower() != 'rimexam': check = raw_input('Hit return or type \'q\':') if check.lower() == '' or check.lower() == 'rimexam': check = 'rimexam' print 'Push Save&Quit button to quit from the parameter setting window' iraf.epar('rimexam') imexam_ng = 1 elif check.lower() == 'q': check = 'q' auto_skip = 1 imexam_ng = 0 else : print 'Error: unknown answer (%s)' % (check) os.remove(tmp_coo) if auto_skip == 0: # display result for j in range(len(ret)): print ret[j] print '\n' # parse results param1 = ret[len(ret)-2].split() param2 = ret[len(ret)-1].split() if len(param1) == 4 and len(param2) == 11: if isfloat(param1[0]): xobj.append(float(param1[0])) if isfloat(param1[0]): yobj.append(float(param1[1])) if isfloat(param2[4]): peak.append(float(param2[4])) else: peak.append(-9999.0) if isfloat(param2[9]): fwhm.append(float(param2[9])) else: fwhm.append(-9999.0) freg.write('image; circle %.3f %.3f 5 # color=red\n' % (xobj[ii], yobj[ii])) else : xobj.append(-9999.0) yobj.append(-9999.0) peak.append(-9999.0) fwhm.append(-9999.0) else: xobj.append(-9999.0) yobj.append(-9999.0) peak.append(-9999.0) fwhm.append(-9999.0) freg.close() d.set('regions load %s' % tmp_reg) os.remove(tmp_reg) check = raw_input('Is this position okay? (yes/no/skip)') if check.lower() == '' or check.lower() == 'yes': calc_ng = 0 # update reference points nskip = 0 for ii in range(len(xobj)): if xobj[ii] < 0: nskip += 1 if nobj == len(xobj) and nskip == 0: gsx0 = gsx gsy0 = gsy xref = xobj yref = yobj elif check.lower() == 'skip': calc_ng = 0 skip_image = 1 xobj = [] yobj = [] peak = [] fwhm = [] for ii in range(nobj): xobj.append(-9999.0) yobj.append(-9999.0) peak.append(-9999.0) fwhm.append(-9999.0) else : calc_ng = 1 else: print "" print "Warning:" print "Estimated position is out of range" print "Pick up object manually\n" calc_ng = 1 # calculate position and fwhm if calc_ng == 1: skip_image = 0 imexam_ng = 1 calc_ok = 0 while calc_ok != 1: # display image d.set('regions delete all') iraf.display(inimg_arr[i], 1) # show position of the objects in the first image if k != 0: if not trace: d.set('regions load %s' % tmp0_reg) xobj = [] yobj = [] peak = [] fwhm = [] skip_obj = 0 n_skip_obj = 0 for nn in range(nobj): imexam_ng = 1 skip_obj = 0 if trace: xg = xref[nn] + (gsx - gsx0) yg = yref[nn] + (gsy - gsy0) tmp_reg = tmp_prefix+'.reg' if os.access(tmp_reg, os.R_OK): os.remove(tmp_reg) freg = open(tmp_reg, 'w') freg.write('image; circle %.3f %.3f 20 # color=blue text={%d}\n' % (xg, yg, (nn+1))) freg.close() d.set('regions delete all') d.set('regions load %s' % tmp_reg) while imexam_ng == 1: print '\n' print '##### Pickup object No. 
%d / %d #####' % (nn+1, nobj) print '\n' print '\nSelect Object --> type a , Quit --> type q on the ds9 image' print 'To skip this image type q on the ds9 image and then this image will not be used' print 'Caution: do not type any key except a or q' try : ret = iraf.imexam(inimg_arr[i],1,Stdout=1) if len(ret) == 0: skip_obj = 1 while len(ret) < 4 and skip_obj == 0: print '\nSelect Object --> type a , Quit --> type q on the ds9 image' print 'To skip this image type q on the ds9 image and then this image will not be used' print 'Caution: do not type any key except a or q' ret = iraf.imexam(inimg_arr[i],1,Stdout=1) if len(ret) == 0: skip_obj = 1 imexam_ng = 0 except: print '\nIRAF imexam failed' print 'Try again by changing rimexam parameters' print 'Hit return to enter rimexam parameter setting window' print 'To skip this image type \'q\' and then this image will not be used.' check = '' while check.lower() != 'q' and check.lower() != 'rimexam': check = raw_input('Hit return or type \'q\':') if check.lower() == '' or check.lower() == 'rimexam': check = 'rimexam' print 'Push Save\&Quit to quit from the parameter setting window' iraf.epar('rimexam') imexam_ng = 1 elif check.lower() == 'q': check = 'q' skip_obj = 1 imexam_ng = 0 else : print 'Error: unknown answer (%s)' % (check) if skip_obj == 0: # display result for j in range(len(ret)): print ret[j] print '\n' # parse results for ii in range(len(ret)): if not ret[ii].startswith('#') and len(ret[ii].split()) > 2: param = ret[ii].split() if len(param) == 4: if isfloat(param[0]): xobj_tmp = float(param[0]) if isfloat(param[1]): yobj_tmp = float(param[1]) elif len(param) == 11: if isfloat(param[4]): peak_tmp = float(param[4]) else: peak_tmp = -9999.0 if isfloat(param[9]): fwhm_tmp = float(param[9]) else: fwhm_tmp = -9999.0 else: print >> sys.stderr, 'failed to pick up object in %s' % inimg remove_temp_all(tmp_prefix) return 1 xobj.append(xobj_tmp) yobj.append(yobj_tmp) peak.append(peak_tmp) fwhm.append(fwhm_tmp) else: xobj.append(-9999.0) yobj.append(-9999.0) peak.append(-9999.0) fwhm.append(-9999.0) n_skip_obj += 1 # check number of skipped objects if n_skip_obj == nobj: skip_image = 1 calc_ok = 1 else: skip_image = 0 else: xobj = [] yobj = [] peak = [] fwhm = [] while imexam_ng == 1: print '\nSelect Object --> type a , Quit --> type q on the ds9 image' print 'To skip this image type q on the ds9 image and then this image will not be used' print 'Caution: do not type any key except a or q' try : ret = iraf.imexam(inimg_arr[i],1,Stdout=1) if len(ret) == 0: skip_image = 1 while len(ret) < 4 and skip_image == 0: print '\nSelect Object --> type a , Quit --> type q on the ds9 image' print 'To skip this image type q on the ds9 image and then this image will not be used' print 'Caution: do not type any key except a or q' ret = iraf.imexam(inimg_arr[i],1,Stdout=1) if len(ret) == 0: skip_image = 1 imexam_ng = 0 except: print '\nIRAF imexam failed' print 'Try again by changing rimexam parameters' print 'Hit return to enter rimexam parameter setting window' print 'To skip this image type \'q\' and then this image will not be used.' 
check = '' while check.lower() != 'q' and check.lower() != 'rimexam': check = raw_input('Hit return or type \'q\':') if check.lower() == '' or check.lower() == 'rimexam': check = 'rimexam' print 'Push Save\&Quit to quit from the parameter setting window' iraf.epar('rimexam') imexam_ng = 1 elif check.lower() == 'q': check = 'q' skip_image = 1 imexam_ng = 0 else : print 'Error: unknown answer (%s)' % (check) if skip_image == 0: # display result for j in range(len(ret)): print ret[j] print '\n' # parse results for ii in range(len(ret)): if not ret[ii].startswith('#') and len(ret[ii].split()) > 2: param = ret[ii].split() if len(param) == 4: if isfloat(param[0]): xobj.append(float(param[0])) if isfloat(param[1]): yobj.append(float(param[1])) elif len(param) == 11: if isfloat(param[4]): peak.append(float(param[4])) else: peak.append(-9999.0) if isfloat(param[9]): fwhm.append(float(param[9])) else: fwhm.append(-9999.0) else: print >> sys.stderr, 'failed to pick up object in %s' % inimg remove_temp_all(tmp_prefix) return 1 # check consistency nobj = len(xobj) if nobj != len(yobj) or nobj != len(peak) or nobj != len(fwhm): print >> sys.stderr, 'Number of the recorded objects is inconsistent' remove_temp_all(tmp_prefix) return 1 xref = xobj yref = yobj # save dummy values for the skipped frames at the beginning if i != 0: for ii in range(i): xarr[ii] = [] yarr[ii] = [] for jj in range(nobj): xarr[ii].append(-9999.0) yarr[ii].append(-9999.0) peak_arr[ii].append(-9999.0) fwhm_arr[ii].append(-9999.0) # save position of the objects in the first image if os.access(tmp0_reg, os.R_OK): os.remove(tmp0_reg) freg0 = open(tmp0_reg, 'w') for ii in range(nobj): freg0.write('image; point(%.3f,%.3f) # point=x text={%d}\n' % (xobj[ii], yobj[ii], (ii+1))) freg0.close() else: xobj.append(-9999.0) yobj.append(-9999.0) peak.append(-9999.0) fwhm.append(-9999.0) calc_ok = 1 if skip_image == 0: # show results on ds9 tmp_reg = tmp_prefix+'.reg' if os.access(tmp_reg, os.R_OK): os.remove(tmp_reg) freg = open(tmp_reg, 'w') if len(xobj) > 0: for ii in range(nobj): if xobj[ii] >= 0: freg.write('image; circle %.3f %.3f 5 # color=red text={%d}\n' % (xobj[ii], yobj[ii], (ii+1))) freg.close() d.set('regions load %s' % tmp_reg) os.remove(tmp_reg) print 'Is this position okay? 
' print '<options>' print ' yes or return : accept this postion' print ' no or n : measure position again' print ' r or rimexam : change rimexam parameters' print ' q or quit : skip this image' check = raw_input('') if check.lower() == '' or check.lower() == 'yes': calc_ok = 1 xref = xobj yref = yobj gsx0 = gsx gsy0 = gsy elif check.lower() == 'r' or check.lower() == 'rimexam': print 'Push Save\&Quit to quit from the parameter setting window\n' iraf.epar('rimexam') calc_ok = 0 imexam_ng = 1 elif check.lower() == 'q' or check.lower() == 'quit': print 'Skip image (%s)' % inimg_arr[i] xobj = [] yobj = [] peak = [] fwhm = [] if k == 0: xobj.append(-9999.0) yobj.append(-9999.0) peak.append(-9999.0) fwhm.append(-9999.0) else: for ii in range(nobj): xobj.append(-9999.0) yobj.append(-9999.0) peak.append(-9999.0) fwhm.append(-9999.0) calc_ok = 1 else : imexam_ng = 1 calc_ok = 0 # save position into array xarr.append(xobj) yarr.append(yobj) peak_arr.append(peak) fwhm_arr.append(fwhm) # close image handler im.close() # increment counter if skip_image == 0: k += 1 # remove all temporary files remove_temp_all(tmp_prefix) # save measured coordinates into geomap input file and fits header for i in range(len(inimg_arr)): # open image handler im = pyfits.open(inimg_arr[i], mode='update') # open file handler fgmp = open(gmp_arr[i], 'w') for j in range(nobj): # record imexam results into header key = 'xc%d' % (j+1) im[0].header.update(key,xarr[i][j]) key = 'yc%d' % (j+1) im[0].header.update(key,yarr[i][j]) key = 'peak%d' % (j+1) im[0].header.update(key,peak_arr[i][j]) key = 'fwhm%d' % (j+1) im[0].header.update(key,fwhm_arr[i][j]) if xarr[i][j] >= 0: fgmp.write('%.3f %.3f %.3f %.3f\n' % (xref[j],yref[j],xarr[i][j],yarr[i][j])) else: fgmp.write('#%.3f %.3f %.3f %.3f\n' % (xref[j],yref[j],xarr[i][j],yarr[i][j])) # close image handler im.close() # close file handler fgmp.close() return 0
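# Hypothetical usage sketch (not in the original source): calcoffset() expects
# a text file listing the dithered frames (one FITS name per line) plus an
# optional filename prefix, and writes a .gmp geomap input file for each frame.
# The list name and prefix below are placeholders:
#
#     ret = calcoffset('object.lst', inpref='bs_', trace=True, review=False)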
def start(obsDirList, use_pq_offsets, im3dtran, over=""): """MERGE This module contains all the functions needed to merge the final data cubes. NOTE: If you wish to shift the cubes manually in QFits View you can combine them in this script by making sure that you attach the prefix "shif" to each shifted image and save them in the observation directory (ie. obs108). This is necessary for very faint objects. INPUT: - Reference data cubes - A list of paths where final data cubes are located - Transformed integral field spectra OUTPUT: - Merged cubes for each observation (ie. DATE_obs##(#).fits) - One final merged cube from entire observation program """ # Store the current working directory so we can find our way back later on. path = os.getcwd() iraf.gemini() iraf.nifs() iraf.gnirs() iraf.gemtools() # Unlearn the used tasks. iraf.unlearn(iraf.gemini, iraf.gemtools, iraf.gnirs, iraf.nifs) # Prepare the package for NIFS iraf.nsheaders("nifs", logfile="Nifty.log") iraf.set(stdimage='imt2048') user_clobber = iraf.envget("clobber") iraf.reset(clobber='yes') # Set the default logfile for iraf tasks. # TODO: Set the logfile for all iraf tasks! Right now it is not logging their output because of im3dtran... # It seems im3dtran doesn't have a "log" parameter. log = "Nifty.log" # Change to the directory in iraf. iraffunctions.chdir(path) # Create some lists here. listsOfCubes = [ ] # List of lists of cubes (one list for each science observation directory). mergedCubes = [ ] # List of Merged cubes (one merged cube for each science observation directory). obsidlist = [] # List of science observation id s. # Pixel scale in arcseconds/pixel. pixScale = 0.05 # TODO(ncomeau[*AT*]uvic.ca): implement a way to read and save cubelists to textfiles. It would be nice for users to # be able to edit the list of cubes to merge by hand. # If no Merged directory exists that contains a textfile list of cubes: # Go to each science directory and copy cubes from there to a new directory called Merged. for obsDir in obsDirList: # Get date, obsid and obsPath by splitting each science directory name. # Eg: directory name is ""/Users/ncomeau/research/newer-nifty/hd165459/20160705/H/obs13", then: # temp1 == ('/Users/ncomeau/research/newer-nifty/hd165459/20160705/H', 'obs13') # temp2 == ('/Users/ncomeau/research/newer-nifty/hd165459/20160705', 'H') # temp3 == ('/Users/ncomeau/research/newer-nifty/hd165459', '20160705') # temp4 == ('/Users/ncomeau/research/newer-nifty', 'hd165459') # TODO: make this clearer. temp1 = os.path.split(obsDir) temp2 = os.path.split(temp1[0]) temp3 = os.path.split(temp2[0]) temp4 = os.path.split(temp3[0]) objname = temp4[1] date = temp3[1] obsid = temp1[1] obsPath = temp3[0] os.chdir(obsDir) obsidlist.append(date + '_' + obsid) # Create a directory called Merged and copy all the data cubes to this directory. if not os.path.exists(obsPath + '/Merged/'): os.mkdir(obsPath + '/Merged/') logging.info('I am creating a directory called Merged') Merged = obsPath + '/Merged' if not os.path.exists(Merged + '/' + date + '_' + obsid): os.mkdir(Merged + '/' + date + '_' + obsid) logging.info( 'I am creating a directory with date and abs ID inside Merged ' ) # If a list called shiftedcubes already exists then just merge those shifted cubes and continue. 
if glob.glob("./shift*.fits"): if over: if os.path.exists('./' + obsid + '_merged.fits'): os.remove('./' + obsid + '_merged.fits') iraf.gemcube(input="shif*.fits[SCI]", output=obsid + '_merged', logfile=log) elif not os.path.exists('./' + obsid + '_merged.fits'): iraf.gemcube(input="shif*.fits[SCI]", output=obsid + '_merged', logfile=log) else: logging.info( "Output exists and -over- not set - shifted cubes are not being merged" ) shutil.copy('./' + obsid + '_merged.fits', Merged) if obsDir == obsDirList[-1]: return else: continue # Create a list called cubes, which stores all the cubes from a particular night. # Store all the cubes lists in a list of lists called listsOfCubes. # TODO: syntax is fairly ugly; there may be a better way to do this. cubes = glob.glob( 'catfbrsnN*.fits' ) # Cubes order at this point is arbitrary so we need to sort. cubes.sort(key=lambda x: x[-8:-5] ) # Sort cubes in increasing order by last three digits. if cubes: listsOfCubes.append(cubes) else: cubes = glob.glob('cptfbrsnN*.fits') if cubes: cubes.sort( key=lambda x: x[-8:-5] ) # Sort cubes in increasing order by last three digits. listsOfCubes.append(cubes) else: cubes = glob.glob('ctfbrsnN*.fits') if cubes: cubes.sort( key=lambda x: x[-8:-5] ) # Sort cubes in increasing order by last three digits. listsOfCubes.append(cubes) else: logging.info( "\n+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++" ) logging.info( "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++" ) logging.info("") logging.info(" ERROR in merge: no cubes found!") logging.info("") logging.info( "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++" ) logging.info( "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\n" ) raise SystemExit # Copy cubes to their respective data_obsid directory within Merged. for cube in cubes: shutil.copy(cube, Merged + '/' + date + '_' + obsid) os.chdir(Merged) n = 0 for cubes in listsOfCubes: shiftlist = [] os.chdir(Merged + '/' + obsidlist[n]) iraffunctions.chdir(Merged + '/' + obsidlist[n]) if use_pq_offsets: # Set the zero point p and q offsets to the p and q offsets of the first cube in each list of cubes. header = astropy.io.fits.open(cubes[0]) p0 = header[0].header['POFFSET'] q0 = header[0].header['QOFFSET'] foff = open('offsets.txt', 'w') foff.write('%d %d %d\n' % (0, 0, 0)) foff.close() suffix = cubes[0][-8:-5] if im3dtran: if os.path.exists('transcube' + suffix + '.fits'): if not over: logging.info( 'Output already exists and -over- not set - skipping im3dtran' ) if over: os.remove('transcube' + suffix + '.fits') iraf.im3dtran(input=cubes[0] + '[SCI][*,*,-*]', new_x=1, new_y=3, new_z=2, output='transcube' + suffix) else: iraf.im3dtran(input=cubes[0] + '[SCI][*,*,-*]', new_x=1, new_y=3, new_z=2, output='transcube' + suffix) else: iraf.imcopy(cubes[0] + '[SCI][*,*,*]', 'NONtranscube' + suffix + '.fits') shiftlist.append('cube' + suffix + '.fits') iraffunctions.chdir(os.getcwd()) for i in range(len(cubes)): # Skip the first cube! if i == 0: continue header2 = astropy.io.fits.open(cubes[i]) suffix = cubes[i][-8:-5] # If user wants to merge using p and q offsets, grab those from .fits headers. if use_pq_offsets: # find the p and q offsets of the other cubes in the sequence. 
xoff = header2[0].header['POFFSET'] yoff = header2[0].header['QOFFSET'] # calculate the difference between the zero point offsets and the offsets of the other cubes and convert that to pixels xShift = round((xoff - p0) / pixScale) yShift = round((yoff - q0) / pixScale) # write all offsets to a text file (keep in mind that the x and y offsets use different pixel scales) foff = open('offsets.txt', 'a') if im3dtran: # If we swap the y and lambda axis we must also write the offsets in x, lambda, y. foff.write('%d %d %d\n' % (int(xShift), 0, int(yShift))) else: # Write offsets in regular x, y, lambda. foff.write('%d\t%d\t%d\n' % (xShift, yShift, 0.)) foff.close() if im3dtran: prefix = 'transcube' if os.path.exists('transcube' + suffix + '.fits'): if not over: logging.info( 'Output already exists and -over- not set - skipping im3dtran' ) if over: os.remove('transcube' + suffix + '.fits') iraf.im3dtran(input=cubes[i] + '[SCI][*,*,-*]', new_x=1, new_y=3, new_z=2, output='transcube' + suffix) else: iraf.im3dtran(input=cubes[i] + '[SCI][*,*,-*]', new_x=1, new_y=3, new_z=2, output='transcube' + suffix) else: prefix = 'NONtranscube' iraf.imcopy(cubes[i] + '[SCI][*,*,*]', prefix + suffix + '.fits') shiftlist.append('cube' + suffix + '.fits') if not use_pq_offsets: # Before we combine make sure a suitable offsets.txt file exists. a = raw_input( "\nPaused. Please provide a suitable offsets.txt file in ", Merged + '/' + obsidlist[n]) while not os.path.exists('offsets.txt'): a = raw_input("No offsets.txt file found. Please try again.") logging.info('offsets.txt found successfully for', obsidlist[n]) if os.path.exists('cube_merged.fits'): if over: os.remove('cube_merged.fits') iraf.imcombine(prefix + '*', output='cube_merged.fits', combine='median', offsets='offsets.txt') else: logging.info( 'Output already exists and -over- not set - skipping imcombine' ) else: iraf.imcombine(prefix + '*', output='cube_merged.fits', combine='median', offsets='offsets.txt') if im3dtran: # Transpose the cube back to x, y, lambda. if os.path.exists('out.fits'): if over: os.remove('out.fits') iraf.im3dtran(input='cube_merged[*,-*,*]', new_x=1, new_y=3, new_z=2, output='out.fits') else: logging.info( 'Output already exists and -over- not set - skipping final im3dtran' ) else: iraf.im3dtran(input='cube_merged[*,-*,*]', new_x=1, new_y=3, new_z=2, output='out.fits') iraf.fxcopy(input=cubes[0] + '[0], out.fits', output=obsidlist[n] + '_merged.fits') else: iraf.fxcopy(input=cubes[0] + '[0], cube_merged.fits', output=obsidlist[n] + '_merged.fits') mergedCubes.append(obsidlist[n] + '_merged.fits') n += 1 os.chdir(Merged) # Copy the merged observation sequence data cubes to the Merged directory. for i in range(len(mergedCubes)): shutil.copy(Merged + '/' + obsidlist[i] + '/' + mergedCubes[i], './') # Merge all the individual merged observation sequence data cubes. # TODO: test. Still untested. """
str_flats = data_dir + ('[0],' + data_dir).join(s_flats) + '[0]'
flats_temp = SRP.outputnames(s_flats, 'temp')
str_flats_temp = temp_dir + (',' + temp_dir).join(flats_temp) + ''
t_flats = list(set(t_flats) - set(s_flats))
nflats = nflats - np.size(s_flats)

print "**** subtract dark from flats ****"
iraf.imarith(str_flats, '-', cal_dir + 'master_dark.fits', str_flats_temp)

print "**** combine flats ****"
iraf.reset(use_new_imt="no")
iraf.flpr("0")
str_flats_temp = temp_dir + ('[0],' + temp_dir).join(flats_temp) + '[0]'
mflatname = temp_dir + 'master_flat_' + str(i) + '.fits'
iraf.imcombine(str_flats_temp, mflatname, combine="sum")
m_flats.append(mflatname)

print "**** Loop it again because IRAF sucks ****"
print "**** combine master flats ****"
str_flats_temp = ','.join(m_flats)
import getopt
# Python script used to calibrate IR data.
from pyraf import iraf
try:
    import pyfits
except:
    import astropy.io.fits as pyfits
import sys, os, commands
import numpy as np
from subprocess import Popen, PIPE
import pdb, time

# Set image type to fits for the purpose of this calibration.
iraf.set(imtype='fits')
iraf.reset(clobber='yes')

print 'IMPORTANT: All files used in this script must be uncompressed!'


def bad_pix(a, s):
    '''Return a bad pixel map based on the median and median absolute
    deviation, replacing anything outside abs(s*mad) with NaNs.'''
    med = np.median(a)
    # calculate median absolute deviation
    mad = np.median(np.abs(a - med))
    b = np.float64(a)
    b[np.where((b < (med - 3 * mad)) | (b > (med + s * mad)))] = np.nan
    return b


def ask(text, def_val):
    temp = raw_input(text + ' = ' + str(def_val) + ' =')
    if len(temp) == 0:
# Eduardo Balbinot - June 2010
#
#===============================================================================#

import sys
import os
from numpy import *
import numpy as np
from pyraf import iraf
import pyfits
import pywcs

# Loading necessary IRAF packages
iraf.digiphot(_doprint=0)
iraf.daophot(_doprint=0)
iraf.apphot(_doprint=0)
iraf.reset(min_lenuserarea='200000')


class bcolor:
    HEADER = '\033[95m'
    OKBLUE = '\033[94m\033[1m'
    OKGREEN = '\033[92m\033[1m'
    WARNING = '\033[91m\033[1m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'

    def disable(self):
        self.HEADER = ''
        self.OKBLUE = ''
        self.OKGREEN = ''
        self.WARNING = ''
        self.FAIL = ''
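# Hypothetical usage sketch (not in the original source): bcolor simply holds
# ANSI escape sequences for coloured terminal output; wrap a message between a
# colour attribute and ENDC, e.g.
#
#     print bcolor.WARNING + 'WARNING: saturated stars in frame' + bcolor.ENDC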
def start(obsDirList, calDirList, over, start, stop): # Set up the logging file FORMAT = '%(asctime)s %(message)s' DATEFMT = datefmt() logging.basicConfig(filename='main.log', format=FORMAT, datefmt=DATEFMT, level=logging.DEBUG) log = os.getcwd() + '/main.log' logging.info('###############################') logging.info('# #') logging.info('# Start Calibration Reduction #') logging.info('# #') logging.info('###############################') print '###############################' print '# #' print '# Start Calibration Reduction #' print '# #' print '###############################' # Unlearn the used tasks iraf.unlearn(iraf.gemini, iraf.gemtools, iraf.gnirs, iraf.nifs) # Prepare the package for NIFS iraf.nsheaders("nifs", logfile=log) iraf.set(stdimage='imt2048') user_clobber = iraf.envget("clobber") iraf.reset(clobber='yes') path = os.getcwd() # loop over the Calibrations directories and reduce the day cals in each one for calpath in calDirList: os.chdir(calpath) pwdDir = os.getcwd() + "/" iraffunctions.chdir(pwdDir) # define the cals lists and images flatlist = open('flatlist', "r").readlines() flatdarklist = open("flatdarklist", "r").readlines() arcdarklist = open("arcdarklist", "r").readlines() arclist = open("arclist", "r").readlines() ronchilist = open("ronchilist", "r").readlines() calflat = (flatlist[0].strip()).rstrip('.fits') flatdark = (flatdarklist[0].strip()).rstrip('.fits') arcdark = (arcdarklist[0].strip()).rstrip('.fits') arc = (arclist[0].strip()).rstrip('.fits') ronchiflat = (ronchilist[0].strip()).rstrip('.fits') # check start and stop values for reduction steps valindex = start if valindex > stop or valindex < 1 or stop > 6: print "problem with start/stop values" print(valindex, start, stop) while valindex <= stop: #################### ## Prepare raw data if valindex == 1: getShift(calflat, over, log) #################### ## Make flat elif valindex == 2: makeFlat(flatlist, flatdarklist, calflat, flatdark, over, log) #################### ## Combine arc darks elif valindex == 3: makeArcDark(arcdarklist, arcdark, calflat, over, log) #################### ## Combine and flat field arcs elif valindex == 4: reduceArc(arclist, arc, log, over) #################### ## Determine the wavelength of the observation and set the arc coordinate file elif valindex == 5: wavecal("rgn" + arc, log, over) #################### ## Combine arc darks elif valindex == 6: ronchi(ronchilist, ronchiflat, calflat, over, flatdark, log) else: print "No step associated to this value" valindex += 1 os.chdir(path) return
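# Hypothetical usage sketch (not in the original source): the directory lists
# and step range below are placeholders; in the full pipeline they come from
# the earlier sorting/setup step.
#
#     calDirs = ['/data/target/20100401/K/Calibrations']
#     obsDirs = ['/data/target/20100401/K/obs28']
#     start(obsDirs, calDirs, '', 1, 6)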
#!/usr/bin/python2
import os
os.chdir('..')

from pyraf import iraf
iraf.images()
iraf.noao()
iraf.noao.imred()
iraf.noao.imred.ccdred()
iraf.set(stdimage='imt4096')
iraf.reset(imextn='fxf:fts,fit')
iraf.reset(imtype='fts,fit')

from glob import glob
import astropy.io.fits as fits
import numpy as np


def makeDir(newdir):
    while (1):
        if os.path.exists(newdir):
            print "Directory %s already exists" % newdir
            return newdir
            break
        else:
            os.mkdir(newdir)
            print "New Directory '%s' created" % newdir
            return newdir
            break


def rmFile(fileroot):
    """Removes file(s) based on root name of the file
def start(kind, telluricDirectoryList="", scienceDirectoryList=""): """ start(kind): Do a full reduction of either Science or Telluric data. nifsReduce- for the telluric and science data reduction. Reduces NIFS telluric and science frames and attempts a flux calibration. Parameters are loaded from runtimeData/config.cfg. This script will automatically detect if it is being run on telluric data or science data. There are 6 steps. INPUT: + Raw files - Science frames - Sky frames + Calibration files - MDF shift file - Bad Pixel Mask (BPM) - Flat field frame - Reduced arc frame - Reduced ronchi mask frame - arc and ronchi database/ files OUTPUT: - If telluric reduction an efficiency spectrum used to telluric correct and absolute flux calibrate science frames - If science reduction a reduced science data cube. Args: kind (string): either 'Telluric' or 'Science'. telluricDirectoryList (string): Used by low memory pipeline. scienceDirectoryList (string): Used by low memory pipeline. """ # TODO(nat): Right now the pipeline will crash if you decide to skip, say, doing a bad # pixel correction. This is because each step adds a prefix to the frame name, and most following # steps depend on that prefix being there. # One way to fix this is if a step is to be skipped, iraf.copy() is called instead to copy the frame and # add the needed prefix. Messy but it might work for now. ########################################################################### ## ## ## BEGIN - GENERAL REDUCTION SETUP ## ## ## ########################################################################### # Store current working directory for later use. path = os.getcwd() # Set up the logging file. log = os.getcwd() + '/Nifty.log' logging.info('\n#################################################') logging.info('# #') logging.info('# Start the NIFS Science and Telluric Reduction #') logging.info('# #') logging.info('#################################################\n') # Set up/prepare IRAF. iraf.gemini() iraf.gemtools() iraf.gnirs() iraf.nifs() # Reset to default parameters the used IRAF tasks. iraf.unlearn(iraf.gemini, iraf.gemtools, iraf.gnirs, iraf.nifs, iraf.imcopy) # From http://bishop.astro.pomona.edu/Penprase/webdocuments/iraf/beg/beg-image.html: # Before doing anything involving image display the environment variable # stdimage must be set to the correct frame buffer size for the display # servers (as described in the dev$graphcap file under the section "STDIMAGE # devices") or to the correct image display device. The task GDEVICES is # helpful for determining this information for the display servers. iraf.set(stdimage='imt2048') # Prepare the IRAF package for NIFS. # NSHEADERS lists the header parameters used by the various tasks in the # NIFS package (excluding headers values which have values fixed by IRAF or # FITS conventions). iraf.nsheaders("nifs", logfile=log) # Set clobber to 'yes' for the script. This still does not make the gemini # tasks overwrite files, so: # YOU WILL LIKELY HAVE TO REMOVE FILES IF YOU RE_RUN THE SCRIPT. user_clobber = iraf.envget("clobber") iraf.reset(clobber='yes') # This helps make sure all variables are initialized to prevent bugs. scienceSkySubtraction = None scienceOneDExtraction = None extractionXC = None extractionYC = None extractionRadius = None telluricSkySubtraction = None # Load reduction parameters from runtimeData/config.cfg. with open('./config.cfg') as config_file: config = ConfigObj(config_file, unrepr=True) # Read general pipeline config. 
over = config['over'] manualMode = config['manualMode'] calDirList = config['calibrationDirectoryList'] scienceOneDExtraction = config['scienceOneDExtraction'] extractionXC = config['extractionXC'] extractionYC = config['extractionYC'] extractionRadius = config['extractionRadius'] if kind == 'Telluric': # Telluric reduction specific config. telluricReductionConfig = config['telluricReductionConfig'] if telluricDirectoryList: observationDirectoryList = telluricDirectoryList elif not telluricDirectoryList: observationDirectoryList = config['telluricDirectoryList'] start = telluricReductionConfig['telStart'] stop = telluricReductionConfig['telStop'] telluricSkySubtraction = telluricReductionConfig[ 'telluricSkySubtraction'] if kind == 'Science': # Science reduction specific config. scienceReductionConfig = config['scienceReductionConfig'] if scienceDirectoryList: observationDirectoryList = scienceDirectoryList elif not scienceDirectoryList: observationDirectoryList = config['scienceDirectoryList'] start = scienceReductionConfig['sciStart'] stop = scienceReductionConfig['sciStop'] scienceSkySubtraction = scienceReductionConfig[ 'scienceSkySubtraction'] ########################################################################### ## ## ## COMPLETE - GENERAL REDUCTION SETUP ## ## ## ########################################################################### # nifsReduce has two nested loops that reduced data. # It loops through each science (or telluric) directory, and # runs through a series of calibrations steps on the data in that directory. # Loop through all the observation (telluric or science) directories to perform a reduction on each one. for observationDirectory in observationDirectoryList: ########################################################################### ## ## ## BEGIN - OBSERVATION SPECIFIC SETUP ## ## ## ########################################################################### # Print the current directory of data being reduced. logging.info( "\n#################################################################################" ) logging.info(" ") logging.info(" Currently working on reductions in") logging.info(" in " + str(observationDirectory)) logging.info(" ") logging.info( "#################################################################################\n" ) os.chdir(observationDirectory) tempObs = observationDirectory.split(os.sep) obsid = tempObs[-1] # Change the iraf directory to the current directory. pwd = os.getcwd() iraffunctions.chdir(pwd) # Copy relevant calibrations over to the science directory. # Open and store the name of the MDF shift reference file from shiftfile into shift. shift = 'calibrations/shiftFile' # Open and store the name of the flat frame flat = 'calibrations/finalFlat' # Open and store the bad pixel mask finalBadPixelMask = 'calibrations/finalBadPixelMask' # Ronchi, arc and database must all be in local calibrations directory # Open and store the name of the reduced spatial correction ronchi flat frame name from ronchifile in ronchi. ronchi = 'finalRonchi' # Open and store the name of the reduced wavelength calibration arc frame from arclist in arc. arc = 'finalArc' if os.path.exists(os.getcwd() + '/' + ronchi + ".fits"): if over: iraf.delete(os.getcwd() + '/calibrations/finalRonchi.fits') # Copy the spatial calibration ronchi flat frame from Calibrations_grating to the observation directory. 
shutil.copy(os.getcwd() + '/calibrations/finalRonchi.fits', ronchi + '.fits') else: print "\nOutput exists and -over not set - skipping copy of reduced ronchi" else: shutil.copy(os.getcwd() + '/calibrations/finalRonchi.fits', ronchi + '.fits') if os.path.exists(os.getcwd() + '/' + arc + ".fits"): if over: iraf.delete(os.getcwd() + '/calibrations/finalArc.fits') # Copy the spatial calibration arc flat frame from Calibrations_grating to the observation directory. shutil.copy(os.getcwd() + '/calibrations/finalArc.fits', arc + '.fits') else: print "\nOutput exists and -over not set - skipping copy of reduced arc" else: shutil.copy(os.getcwd() + '/calibrations/finalArc.fits', arc + '.fits') # Make sure the database files are in place. Current understanding is that # these should be local to the reduction directory, so need to be copied from # the calDir. if os.path.isdir("./database"): if over: shutil.rmtree("./database") os.mkdir("./database") for item in glob.glob("calibrations/database/*"): shutil.copy(item, "./database/") else: print "\nOutput exists and -over not set - skipping copy of database directory" else: os.mkdir('./database/') for item in glob.glob("calibrations/database/*"): shutil.copy(item, "./database/") if telluricSkySubtraction or scienceSkySubtraction: # Read the list of sky frames in the observation directory. try: skyFrameList = open("skyFrameList", "r").readlines() skyFrameList = [frame.strip() for frame in skyFrameList] except: logging.info( "\n#####################################################################" ) logging.info( "#####################################################################" ) logging.info("") logging.info( " WARNING in reduce: No sky frames were found in a directory." ) logging.info(" Please make a skyFrameList in: " + str(os.getcwd())) logging.info("") logging.info( "#####################################################################" ) logging.info( "#####################################################################\n" ) raise SystemExit sky = skyFrameList[0] # If we are doing a telluric reduction, open the list of telluric frames in the observation directory. # If we are doing a science reduction, open the list of science frames in the observation directory. if kind == 'Telluric': tellist = open('tellist', 'r').readlines() tellist = [frame.strip() for frame in tellist] elif kind == 'Science': scienceFrameList = open("scienceFrameList", "r").readlines() scienceFrameList = [frame.strip() for frame in scienceFrameList] # For science frames, check to see if the number of sky frames matches the number of science frames. # IF NOT duplicate the sky frames and rewrite the sky file and skyFrameList. if scienceSkySubtraction: if not len(skyFrameList) == len(scienceFrameList): skyFrameList = makeSkyList(skyFrameList, scienceFrameList, observationDirectory) ########################################################################### ## ## ## COMPLETE - OBSERVATION SPECIFIC SETUP ## ## BEGIN DATA REDUCTION FOR AN OBSERVATION ## ## ## ########################################################################### # Check start and stop values for reduction steps. Ask user for a correction if # input is not valid. 
        valindex = start
        while valindex > stop or valindex < 1 or stop > 6:
            logging.info("\n#####################################################################")
            logging.info("#####################################################################")
            logging.info("")
            logging.info("     WARNING in reduce: invalid start/stop values of observation")
            logging.info("                        reduction steps.")
            logging.info("")
            logging.info("#####################################################################")
            logging.info("#####################################################################\n")
            valindex = int(raw_input("\nPlease enter a valid start value (1 to 6, default 1): "))
            stop = int(raw_input("\nPlease enter a valid stop value (1 to 6, default 6): "))

        while valindex <= stop:

            ###########################################################################
            ##  STEP 1: Prepare raw data; science, telluric and sky frames ->n       ##
            ###########################################################################

            if valindex == 1:
                if manualMode:
                    a = raw_input("About to enter step 1: locate the spectrum.")
                if kind == 'Telluric':
                    tellist = prepare(tellist, shift, finalBadPixelMask, log, over)
                elif kind == 'Science':
                    scienceFrameList = prepare(scienceFrameList, shift, finalBadPixelMask, log, over)
                if telluricSkySubtraction or scienceSkySubtraction:
                    skyFrameList = prepare(skyFrameList, shift, finalBadPixelMask, log, over)
                logging.info("\n##############################################################################")
                logging.info("")
                logging.info("  STEP 1: Locate the Spectrum (and prepare raw data) ->n - COMPLETED ")
                logging.info("")
                logging.info("##############################################################################\n")

            ###########################################################################
            ##  STEP 2: Sky Subtraction ->sn                                         ##
            ###########################################################################

            elif valindex == 2:
                if manualMode:
                    a = raw_input("About to enter step 2: sky subtraction.")
                # Combine telluric sky frames.
                if kind == 'Telluric':
                    if telluricSkySubtraction:
                        if len(skyFrameList) > 1:
                            combineImages(skyFrameList, "gn" + sky, log, over)
                        else:
                            copyImage(skyFrameList, 'gn' + sky + '.fits', over)
                        skySubtractTel(tellist, "gn" + sky, log, over)
                    else:
                        for image in tellist:
                            iraf.copy('n' + image + '.fits', 'sn' + image + '.fits')
                if kind == 'Science':
                    if scienceSkySubtraction:
                        skySubtractObj(scienceFrameList, skyFrameList, log, over)
                    else:
                        for image in scienceFrameList:
                            iraf.copy('n' + image + '.fits', 'sn' + image + '.fits')
                logging.info("\n##############################################################################")
                logging.info("")
                logging.info("  STEP 2: Sky Subtraction ->sn - COMPLETED ")
                logging.info("")
                logging.info("##############################################################################\n")

            ##############################################################################
            ##  STEP 3: Flat field, slice, subtract dark and correct bad pixels ->brsn  ##
            ##############################################################################

            elif valindex == 3:
                if manualMode:
                    a = raw_input("About to enter step 3: flat fielding and bad pixels correction.")
                if kind == 'Telluric':
                    applyFlat(tellist, flat, log, over, kind)
                    fixBad(tellist, log, over)
                elif kind == 'Science':
                    applyFlat(scienceFrameList, flat, log, over, kind)
                    fixBad(scienceFrameList, log, over)
                logging.info("\n##############################################################################")
                logging.info("")
                logging.info("  STEP 3: Flat fielding and Bad Pixels Correction ->brsn - COMPLETED ")
                logging.info("")
                logging.info("##############################################################################\n")

            ###########################################################################
            ##  STEP 4: Derive and apply 2D to 3D transformation ->tfbrsn            ##
            ###########################################################################

            elif valindex == 4:
                if manualMode:
                    a = raw_input("About to enter step 4: 2D to 3D transformation and Wavelength Calibration.")
                if kind == 'Telluric':
                    fitCoords(tellist, arc, ronchi, log, over, kind)
                    transform(tellist, log, over)
                elif kind == 'Science':
                    fitCoords(scienceFrameList, arc, ronchi, log, over, kind)
                    transform(scienceFrameList, log, over)
                logging.info("\n##############################################################################")
                logging.info("")
                logging.info("  STEP 4: 2D to 3D transformation and Wavelength Calibration ->tfbrsn - COMPLETED ")
                logging.info("")
                logging.info("##############################################################################\n")

            ############################################################################
            ##  STEP 5 (tellurics): For telluric data derive a telluric               ##
            ##                      correction ->gxtfbrsn                             ##
            ##  STEP 5 (science): For science apply an efficiency correction and make ##
            ##                    a data cube (not necessarily in that order).        ##
            ##                    (i) Python method applies correction to the         ##
            ##                        nftransformed cube. Good for faint              ##
            ##                        objects. ->cptfbrsn                             ##
            ##                    (ii) iraf.telluric method applies correction to the ##
            ##                         nftransformed result (not quite a data cube),  ##
            ##                         then nftransforms the cube. ->catfbrsn         ##
            ##                    (iii) If no telluric correction/flux calibration    ##
            ##                          is to be applied, make a plain data           ##
            ##                          cube. ->ctfbrsn                               ##
            ############################################################################

            elif valindex == 5:
                if manualMode:
                    a = raw_input("About to enter step 5.")

                # For telluric data:
                # Make a combined extracted 1D standard star spectrum.
                if kind == 'Telluric':
                    extractOneD(tellist, kind, log, over, extractionXC, extractionYC, extractionRadius)

                    # TODO(nat): add this as a parameter; encapsulate this.
                    copyToScience = True
                    if copyToScience:
                        # Copy final extracted results to the science directory.
                        try:
                            with open("scienceMatchedTellsList", "r") as f:
                                lines = f.readlines()
                            lines = [x.strip() for x in lines]
                            for i in range(len(lines)):
                                if "obs" in lines[i]:
                                    k = 1
                                    while i + k != len(lines) and "obs" not in lines[i + k]:
                                        copyResultsToScience("gxtfbrsn" + tellist[0] + ".fits", "0_tel" + lines[i + k] + ".fits", over)
                                        k += 1
                        except IOError:
                            logging.info("\nNo scienceMatchedTellsList found in " + os.getcwd() + ". Skipping copy of extracted spectra to science directory.")

                    logging.info("\n##############################################################################")
                    logging.info("")
                    logging.info("  STEP 5a: Extract 1D Spectra and Make Combined 1D Standard Star Spectrum")
                    logging.info("           ->gxtfbrsn - COMPLETED")
                    logging.info("")
                    logging.info("##############################################################################\n")

                    # TODO(nat): add this as a parameter.
                    makeTelluricCube = True
                    if makeTelluricCube:
                        makeCube('tfbrsn', tellist, log, over)

                    logging.info("\n##############################################################################")
                    logging.info("")
                    logging.info("  STEP 5b: Make uncorrected standard star data cubes, ->ctfbrsn - COMPLETED")
                    logging.info("")
                    logging.info("##############################################################################\n")

                # For science data:
                # Possibly extract 1D spectra, and make uncorrected cubes.
                elif kind == 'Science':
                    if scienceOneDExtraction:
                        extractOneD(scienceFrameList, kind, log, over, extractionXC, extractionYC, extractionRadius)
                        copyExtracted(scienceFrameList, over)
                        logging.info("\n##############################################################################")
                        logging.info("")
                        logging.info("  STEP 5a: Make extracted 1D Science spectra, ->ctgbrsn - COMPLETED")
                        logging.info("")
                        logging.info("##############################################################################\n")
                    makeCube('tfbrsn', scienceFrameList, log, over)

                    # TODO(nat): encapsulate this inside a function.
                    if os.path.exists('products_uncorrected'):
                        if over:
                            shutil.rmtree('products_uncorrected')
                            os.mkdir('products_uncorrected')
                        else:
                            logging.info("\nOutput exists and -over not set - skipping creation of products_uncorrected directory")
                    else:
                        os.mkdir('products_uncorrected')
                    for item in scienceFrameList:
                        if os.path.exists('products_uncorrected/ctfbrsn' + item + '.fits'):
                            if over:
                                os.remove('products_uncorrected/ctfbrsn' + item + '.fits')
                                shutil.copy('ctfbrsn' + item + '.fits', 'products_uncorrected/ctfbrsn' + item + '.fits')
                            else:
                                logging.info("\nOutput exists and -over not set - skipping copy of uncorrected cube")
                        else:
                            shutil.copy('ctfbrsn' + item + '.fits', 'products_uncorrected/ctfbrsn' + item + '.fits')

                    if os.path.exists('products_telluric_corrected'):
                        if over:
                            shutil.rmtree('products_telluric_corrected')
                            os.mkdir('products_telluric_corrected')
                        else:
                            logging.info("\nOutput exists and -over not set - skipping creation of products_telluric_corrected directory")
                    else:
                        os.mkdir('products_telluric_corrected')
                    for item in scienceFrameList:
                        if os.path.exists('products_telluric_corrected/ctfbrsn' + item + '.fits'):
                            if over:
                                os.remove('products_telluric_corrected/ctfbrsn' + item + '.fits')
                                shutil.copy('ctfbrsn' + item + '.fits', 'products_telluric_corrected/ctfbrsn' + item + '.fits')
                            else:
                                logging.info("\nOutput exists and -over not set - skipping copy of uncorrected cube")
                        else:
                            shutil.copy('ctfbrsn' + item + '.fits', 'products_telluric_corrected/ctfbrsn' + item + '.fits')

                    logging.info("\n##############################################################################")
                    logging.info("")
                    logging.info("  STEP 5b: Make uncorrected science data cubes, ->ctfbrsn - COMPLETED")
                    logging.info("")
                    logging.info("##############################################################################\n")

            valindex += 1

        logging.info("\n##############################################################################")
        logging.info("")
        logging.info("  COMPLETE - Reductions completed for " + str(observationDirectory))
        logging.info("")
        logging.info("##############################################################################\n")

    # Return to the directory the script was begun from.
    os.chdir(path)
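# ---------------------------------------------------------------------------
# Illustrative sketch only: the reduction loop above reads its settings from a
# nested ``config`` mapping. The key names below are exactly the ones accessed
# in that code; the values (paths, step numbers, flags) are made-up
# placeholders, not defaults from the real pipeline configuration file.
# ---------------------------------------------------------------------------
example_config = {
    'over': False,                                          # overwrite existing outputs
    'manualMode': False,                                    # pause before each step with raw_input
    'calibrationDirectoryList': ['/data/Calibrations_K'],   # placeholder path
    'scienceOneDExtraction': True,
    'extractionXC': 15.0,                                   # placeholder extraction centre (x)
    'extractionYC': 33.0,                                   # placeholder extraction centre (y)
    'extractionRadius': 2.5,                                # placeholder extraction radius
    'telluricDirectoryList': ['/data/Tellurics/obs10'],     # placeholder path
    'scienceDirectoryList': ['/data/Science/obs28'],        # placeholder path
    'telluricReductionConfig': {
        'telStart': 1,
        'telStop': 5,
        'telluricSkySubtraction': True,
    },
    'scienceReductionConfig': {
        'sciStart': 1,
        'sciStop': 5,
        'scienceSkySubtraction': True,
    },
}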
""" Display Gattini images using the program C{ds9}. """ import os import time from pyraf import iraf iraf.reset(stdimage="imt1600") from processing.new_ops import produce_flat, get_cam_flat_filename def _prep_ds9(): """ Open up an instance of C{ds9} if one is not already running. """ if len( os.popen("ps aux | grep ds9 | grep $USER | grep -v grep"). readlines()) == 0: os.system("ds9 &") time.sleep(3) def display(outfile, frame=1): """ Display a given file, using an optional frame number. """ iraf.display(outfile, frame)