def __init__(self, imList, target="", steps=None, rawPath="./", calPath="", \
             mdf="", biasIm="", flatList="", arcDir="", arcList="", sFunc="", \
             logRoot=""):
    """Store the bookkeeping needed to reduce a set of GMOS IFU exposures.

    Parameters:
        imList   -- file listing the raw science exposures to reduce
        target   -- target name, used to label final products
        steps    -- dict of {stepName: bool} flags gating each stage of run()
                    (None -> empty dict; no steps enabled)
        rawPath  -- directory holding the raw exposures
        calPath  -- directory holding calibration products
        mdf      -- mask definition file name
        biasIm   -- master bias image name (resolved relative to calPath)
        flatList -- file listing the flat exposures ("" -> no flats)
        arcDir   -- directory holding the arc exposures
        arcList  -- file listing the arc exposures
        sFunc    -- sensitivity function name (resolved relative to calPath)
        logRoot  -- prefix for the per-step log files written by run()
    """
    self.target = target
    # BUG FIX: the original default was steps={}, a mutable default shared
    # across every instance; use None and create a fresh dict per instance.
    self.steps = steps if steps is not None else {}
    self.rawPath = rawPath
    self.calPath = calPath
    self.bias = self.calPath + biasIm
    self.mdf = mdf
    self.imList = imList
    self.arcDir = arcDir
    self.arcList = arcList
    # BUG FIX: always define self.flats -- the original only set it when
    # flatList was non-empty, leaving a latent AttributeError for any later
    # step (e.g. run()'s self.flats[i]) when no flat list was supplied.
    self.flats = utils.getImPaths(flatList) if flatList else []
    self.sFunc = self.calPath + sFunc
    self.logRoot = logRoot
    return
def checkMDF(self, rawMDF, flatList, **kwargs): # copy raw MDF to local directory mdf = rawMDF if not os.path.exists(mdf): print "\nFETCHING RAW MDF" mdfPath = "/Users/roedigerj/anaconda/envs/astroconda/iraf_extern/gemini/gmos/data/" rawMDF = mdfPath + rawMDF shutil.copy(rawMDF, "./") # TODO: check that flatList is not empty # grab the name of a single flat to reduce im = utils.getImPaths(flatList)[0] print "\nCHECKING MDF WITH " + im # repeat the following steps until the MDF deemed correct while True: # do basic reduction of selected exposure _ = self.reduceIm(im, fl_inter="no", fl_gscrrej="no", \ fl_extract="no", fl_wavtran="no", fl_skysub="no", \ fl_fluxcal="no", mdffile=mdf, mdfdir="./", verbose="no", \ **kwargs) # extract spectrum to check match between MDF and IFU flat logFile = "" if "logfile" in kwargs.keys(): logFile = kwargs["logfile"] iraf.delete("database/aperg" + im[:im.find(".")] + "*", verify="yes") self.extractSpec("rg" + im, fl_inter="yes", logfile=logFile, \ verbose="no") # TODO: remove any extracted files # call tcalc to fix MDF (if necessary) fixMDF = bool(input("\nDoes MDF need fixing? (True/False): ")) if fixMDF: fiber = input("Fiber to fix: ") flag = input("Flag value: ") iraf.tcalc( mdf, "BEAM", "if NO == " + str(fiber) + "then " + str(flag) + " else BEAM") else: break return
def stackCubes(self, cubeList, **kwargs): outCube = self.target + "_finalCube.fits" # extract filenames of individual data cubes cubePaths = utils.getImPaths(cubeList) cubes = ", ".join(cubePaths) print "\nCOMBINING DATA CUBES: " + cubes # make work directory and copy images there cmd = "mkdir -v scratch/" os.system(cmd) for i, cube in enumerate(cubePaths): cmd = "cp -v " + cube + " scratch/" os.system(cmd) cubePaths[i] = "scratch/" + cube # update the WCS header cards of the cubes to their centroids # TODO: allow for lower pixel limit? # TODO: put calls to pyfalign and pyfmosaic into iterative loop? pyfu.pyfalign(cubePaths, method="centroid", llimit=0) # resample and align the cubes without stacking them # TODO: add line below to view alignment iraf.imdelete("scratch/separatedCubes.fits") pyfu.pyfmosaic(cubePaths, "scratch/separatedCubes.fits", separate="yes") # stack the aligned cubes and view the result # (both as white-light image and slow-paced movie) iraf.imdelete(outCube) pyfu.pyfmosaic(cubePaths, outCube, propvar="yes") self.viewWhiteIm(outCube) time.sleep(30) self.viewCubeMov(outCube) # remove work directory and its contents cmd = "rm scratch/*.fits" os.system(cmd) cmd = "rmdir scratch/" os.system(cmd) return
def run(self):
    """Drive the per-exposure science reduction, then stack the cubes.

    Every stage is gated by a boolean in self.steps, so the pipeline can
    be resumed from any point.  Progress is tracked through filename
    prefixes, one letter per completed stage:
        rg (reduceIm) -> brg (subScatLgt) -> xbrg (cleanCosRays) ->
        qxbrg (correctQE) -> eqxbrg (extractSpec) -> xeqxbrg
        (maskSpec/rectify input) -> txeqxbrg (rectifySpec) ->
        stxeqxbrg (subSky) -> cstxeqxbrg (calibFlux) ->
        dcstxeqxbrg (resampCube).
    All results are written to disk; returns None.
    """
    imPaths = utils.getImPaths(self.imList)
    for i, im in enumerate(imPaths):
        # attach MDF, subtract the bias+overscan, and trim the overscan
        if self.steps["reduceIm"]:
            logFile = self.logRoot + "_reduceIm.log"
            # TODO: remove hard-coding of slits and mdfdir parms
            self.reduceIm(im, rawpath=self.rawPath, slits="both", \
                fl_over="yes", fl_trim="yes", fl_gscrrej="no", \
                fl_wavtran="no", fl_skysub="no", fl_extract="no", \
                fl_fluxcal="no", fl_inter="no", fl_vardq="yes", \
                bias=self.bias, mdffile=self.mdf, mdfdir="./", \
                logfile=logFile, verbose="no")
        # model and subtract the scattered light
        # NOTE(review): assumes self.flats[i] pairs with imPaths[i] --
        # i.e. the flat list is ordered to match the science list; confirm.
        if self.steps["subScatLgt"]:
            self.subScatLgt("rg" + im, self.calPath + "blkMask_" + \
                self.flats[i], prefix = "b", fl_inter="yes", cross="yes")
        # clean the cosmic rays
        if self.steps["cleanCosRays"]:
            logFile = self.logRoot + "_cleanCosRays.log"
            self.cleanCosRays("brg" + im, fl_vardq="yes", xorder=9, \
                yorder=-1, sigclip=4.5, sigfrac=0.5, objlim=1.0, niter=4, \
                key_ron="RDNOISE", key_gain="GAIN", logfile=logFile, \
                verbose="no")
        # correct for QE changes
        if self.steps["correctQE"]:
            logFile = self.logRoot + "_correctQE.log"
            # retrieve arc of matching central wavelength
            arc = utils.matchCentWave(im, self.rawPath, self.arcList, "arc/")
            refIm = "erg" + arc
            os.system("mv " + self.calPath + refIm + " ./")
            self.correctQE("xbrg" + im, refimages=refIm, fl_correct="yes", \
                fl_vardq="yes", logfile=logFile, verbose="no")
            # return arc from whence it came
            os.system("mv " + refIm + " " + self.calPath)
        # extract the spectra
        if self.steps["extractSpec"]:
            logFile = self.logRoot + "_extractSpec.log"
            # retrieve flat of matching central wavelength
            # TODO: set flat through call to matchCentWave?
            flat = self.flats[i]
            refIm = "eqbrg" + flat
            os.system("mv " + self.calPath + refIm + " ./")
            respIm = self.calPath + flat[:flat.find(".")] + "_resp.fits"
            self.extractSpec("qxbrg" + im, reference=refIm, \
                response=respIm, fl_inter="no", fl_vardq="yes", \
                recenter="no", trace="no", weights="none", logfile=logFile, \
                verbose="no")
            # return flat from whence it came
            os.system("mv " + refIm + " " + self.calPath)
            # TODO: place this in extractSpec
            # view extracted spectra in detector plane
            #for j in range(1, 3):
            #    print "\nDISPLAYING eqxbrg" + im + "[SCI," + str(j) + "]"
            #    iraf.display("eqxbrg" + im + "[SCI," + str(j) + "]")
            #    continue
        # adjust the mask to cover cosmetics
        if self.steps["maskSpec"]:
            hdu = fits.open("eqxbrg" + im)
            # create a separate mask for each science extension
            # (EXTVER of the last HDU gives the number of [SCI,*] versions)
            for j in range(1, hdu[-1].header["EXTVER"] + 1):
                txtMask = "mask_" + im[:im.find(".")] + "_" + str(
                    j) + ".txt"
                ext = "[SCI," + str(j) + "]"
                self.adjustDQ("eqxbrg" + im, ext, txtMask)
        # rectify the extracted spectra
        if self.steps["rectifySpec"]:
            logFile = self.logRoot + "_rectifySpec.log"
            # retrieve arc of matching central wavelength
            arc = utils.matchCentWave(im, self.rawPath, self.arcList, "arc/")
            refIm = "erg" + arc
            os.system("mv " + self.calPath + refIm + " ./")
            # use first spectra to inform choice of final wavelength sampling
            print "\nRECTIFYING SPECTRA IN", "xeqxbrg" + im
            # NOTE(review): dw is chosen interactively only when i == 0 and
            # reused for later exposures; if this step is run starting at
            # i > 0, dw would be undefined -- confirm intended restart flow.
            if i == 0:
                self.rectifySpec("xeqxbrg" + im, wavtraname=refIm, \
                    fl_vardq="no", dw="INDEF", logfile=logFile, verbose="no")
                dw = input("\nDesired wavelength sampling: ")
            self.rectifySpec("xeqxbrg" + im, wavtraname=refIm, \
                fl_vardq="yes", dw=dw, logfile=logFile, verbose="no")
            # return arc from whence it came
            os.system("mv " + refIm + " " + self.calPath)
        # subtract the sky from the object fibers
        if self.steps["subSky"]:
            logFile = self.logRoot + "_subSky.log"
            self.subSky("txeqxbrg" + im, fl_inter="no", logfile=logFile, \
                verbose="no")
        # flux calibrate the spectra
        if self.steps["calibFlux"]:
            logFile = self.logRoot + "_calibFlux.log"
            self.calibFlux("stxeqxbrg" + im, fl_vardq="yes", fl_ext="yes", \
                logfile=logFile, verbose="no")
        # resample the data cube
        if self.steps["resampCube"]:
            logFile = self.logRoot + "_resampCube.log"
            self.resampCube("cstxeqxbrg" + im, fl_atmdisp="yes", \
                fl_var="yes", fl_dq="yes", logfile=logFile)
        continue
    # stack data cubes (runs once, after every exposure is reduced)
    if self.steps["stackCubes"]:
        logFile = self.logRoot + "_stackCubes.log"
        # make list of data cubes
        cubeList = "cubeFiles.txt"
        cmd = "ls dcstxeqxbrg* > " + cubeList
        os.system(cmd)
        # combine data cubes
        self.stackCubes(cubeList)
        # remove list
        cmd = "rm -v " + cubeList
        os.system(cmd)
    return
def run(self):
    """Driver for the v1895 calibration reductions.

    This is a run-once, interactive script: each completed stage has been
    commented out so the next run resumes at the first active call.  The
    commented calls are kept as a record of the exact parameters used.
    NOTE(review): v1895_cal is presumably a module-level calibration
    object defined elsewhere in this file -- confirm before re-enabling
    the commented stages.
    """
    # view bias exposures
    #utils.viewIms(self.biasList, sat="yes")
    # produce master bias
    logFile = "v1895_reduceBias.log"
    masterBias = "v1895_masterBias.fits"
    #self.reduceBias(self.biasList, masterBias, rawpath="bias/", \
    #    fl_inter="no", fl_vardq="yes", logfile=logFile, verbose="no")
    #util.viewIm(masterBias)
    # view flat exposures
    #utils.viewIms(self.flatList, sat="yes")
    # check MDF (and modify, if necessary)
    rawMDF = "gsifu_slits_mdf.fits"
    logFile = "v1895_checkMDF.log"
    #self.checkMDF(rawMDF, self.flatList, slits="both", rawpath="flat/", \
    #    bias=masterBias, logfile=logFile)
    # create fiber trace reference (for each central wavelength)
    # TODO: replace two calls to reduceIm with single call to reduceFlats
    mdf = "gsifu_slits_mdf.fits"
    logFile = "v1895_fiberTrace.log"
    rawFlat = "S20080405S0066"
    #v1895_cal.reduceIm(rawFlat, slits="both", rawpath="flat/", \
    #    fl_gscrrej="no", fl_wavtran="no", fl_skysub="no", fl_fluxcal="no", \
    #    fl_inter="no", fl_vardq="yes", mdffile=mdf, mdfdir="./", \
    #    bias=masterBias, logfile=logFile, verbose="no")
    rawFlat = "S20080405S0070"
    #v1895_cal.reduceIm(rawFlat, slits="both", rawpath="flat/", \
    #    fl_gscrrej="no", fl_wavtran="no", fl_skysub="no", fl_fluxcal="no", \
    #    fl_inter="no", fl_vardq="yes", mdffile=mdf, mdfdir="./", \
    #    bias=masterBias, logfile=logFile, verbose="no")
    # determine wavelength solution for each central wavelength
    logFile = "v1895_waveCal.log"
    arcs = ["S20080405S0109", "S20080405S0110"]
    extFlats = ["ergS20080405S0066", "ergS20080405S0070"]
    for i in range(2):
        #_ = v1895_cal.reduceIm(arcs[i], slits="both", rawpath="arc/", \
        #    fl_bias="no", fl_gscrrej="no", fl_wavtran="no", fl_skysub="no", \
        #    fl_fluxcal="no", fl_inter="no", mdffile=mdf, mdfdir="./", \
        #    reference=extFlats[i], recenter="no", trace="no", \
        #    logfile=logFile, verbose="no")
        continue
    arcList = "@arcFiles.txt"
    #v1895_cal.calWave("erg" + arcList, coordlist="gmos$data/GCALcuar.dat", \
    #    fl_inter="yes", threshold=25., nlost=10, ntarget=15, \
    #    logfile=logFile, verbose="no")
    # reduce the lamp flat (incl. removal of scattered light and QE correction)
    logFile = "v1895_subScat.log"
    #v1895_cal.reduceFlats(self.flatList, subscat="yes", rawpath="flat/", \
    #    slits="both", fl_gscrrej="no", fl_wavtran="no", fl_skysub="no", \
    #    fl_fluxcal="no", fl_inter="no", fl_vardq="yes", mdffile=mdf, \
    #    mdfdir="./", bias=masterBias, logfile=logFile, verbose="no")
    # TODO: replace loop with call to reduceFlats
    arc = "S20080405S0109"
    imList = "flatFiles_473.txt"
    logFile = "v1895_corrQE.log"
    imPaths = utils.getImPaths(imList)
    for im in imPaths:
        #v1895_cal.reduceIm("brg" + im, slits="both", fl_addmdf="no", \
        #    fl_bias="no", fl_over="no", fl_trim="no", fl_qecorr="yes", \
        #    fl_gscrrej="no", fl_wavtran="no", fl_skysub="no", \
        #    fl_extract="yes", fl_fluxcal="no", fl_inter="no", \
        #    fl_vardq="yes", qe_refim="erg" + arc, mdffile=mdf, mdfdir='./', \
        #    logfile=logFile, verbose="no")
        #v1895_cal.viewCube("eqbrg" + im)
        continue
    # TODO: replace loop with call to reduceFlats
    arc = "S20080405S0110"
    imList = "flatFiles_478.txt"
    imPaths = utils.getImPaths(imList)
    for im in imPaths:
        #v1895_cal.reduceIm("brg" + im, slits="both", fl_addmdf="no", \
        #    fl_bias="no", fl_over="no", fl_trim="no", fl_qecorr="yes", \
        #    fl_gscrrej="no", fl_wavtran="no", fl_skysub="no", \
        #    fl_extract="yes", fl_fluxcal="no", fl_inter="no", \
        #    fl_vardq="yes", qe_refim="erg" + arc, mdffile=mdf, mdfdir='./', \
        #    logfile=logFile, verbose="no")
        #v1895_cal.viewCube("eqbrg" + im)
        continue
    # calculate response function for each flat exposure
    # (the only stage still active in this driver)
    imList = "flatFiles.txt"
    logFile = "v1895_respFunc.log"
    imPaths = utils.getImPaths(imList)
    for im in imPaths:
        v1895_cal.compResp("eqbrg" + im, skyimage="", fl_inter="no", \
            fl_fit="yes", function="spline3", order=45, sample="*", \
            logfile=logFile, verbose="no")
        continue
    return
def reduceFlats(self, flatList, inPref="", arcList="", arcPath="", \
        subscat="no", **kwargs):
    """Reduce a list of lamp flats, one exposure at a time.

    Two modes per flat:
      * subscat == "yes": interactively model and subtract scattered
        light from an already-extracted flat (gffindblocks + gfscatsub),
        repeating until the user is satisfied;
      * otherwise: run the standard reduceIm reduction and view the
        resulting cube.
    When kwargs requests QE correction (fl_qecorr == "yes"), the matching
    extracted arc is staged into the working directory beforehand and
    moved back to arcPath afterwards.

    Parameters:
        flatList -- file listing the flat exposures
        inPref   -- prefix already attached to the flat filenames
        arcList  -- file listing arc exposures (for QE reference matching)
        arcPath  -- directory holding the extracted arcs
        subscat  -- "yes" to run the scattered-light branch
        kwargs   -- passed through to reduceIm
    """
    print "\nREDUCING FLATS"
    # retrieve filenames of individual exposures
    flatPaths = utils.getImPaths(flatList)
    # process flats one at a time
    for flat in flatPaths:
        # make additional preparations if QE correction requested
        if "fl_qecorr" in kwargs.keys() and kwargs["fl_qecorr"] == "yes":
            # move extracted arcs to working directory
            refIm = "erg" + utils.matchCentWave(inPref + flat, "./", \
                arcList, "arc/")
            os.system("mv " + arcPath + refIm + " ./")
            # add reference image to kwargs
            kwargs["qe_refim"] = refIm
        # model and remove the scattered light, if requested;
        # otherwise, reduce the image
        if subscat == "yes":
            # TODO: display all science extensions
            # find file holding extracted flat spectra
            # TODO: determination of extFlat probably needs improvement
            # NOTE(review): picks the first directory entry containing both
            # the flat name and "erg" -- fragile if multiple products match.
            dirList = os.listdir("./")
            descendList = [im for im in dirList if flat in im]
            extFlat = [im for im in descendList if "erg" in im][0]
            print extFlat
            # find gaps between fiber bundles on detector
            # (extFlat[1:] strips the leading "e" to get the pre-extraction
            # image that gfscatsub operates on)
            gapFile = "blkMask_" + flat[:flat.find(".")] + ".txt"
            iraf.delete(gapFile)
            iraf.gffindblocks(extFlat[1:], extFlat, gapFile)
            # model and remove scattered light, if necessary
            # (and repeat until satisfied)
            utils.examIm(extFlat[1:] + "[SCI]", frame=1)
            while True:
                fixScat = bool(
                    input(
                        "Does subtraction of scattered light need improvement? (True/False): "
                    ))
                if fixScat:
                    xOrders = raw_input("X orders (csv): ")
                    yOrders = raw_input("Y orders (csv): ")
                    iraf.imdelete("b" + extFlat[1:])
                    iraf.gfscatsub(extFlat[1:], gapFile, fl_inter="yes", \
                        prefix="b", xorder=xOrders, yorder=yOrders, \
                        cross="yes")
                    utils.examIm("b" + extFlat[1:] + "[SCI]", frame=1)
                else:
                    break
        else:
            # reduce the image
            outPref = self.reduceIm(inPref + flat, **kwargs)
            # TODO: for this to work, need to know the proper prefix ...
            # (have reduceIm return the prefix?)
            self.viewCube(outPref + inPref + flat)
            pass
        # return extracted arcs to their rightful place
        # (refIm is defined exactly when this condition held above)
        if "fl_qecorr" in kwargs.keys() and kwargs["fl_qecorr"] == "yes":
            os.system("mv " + refIm + " " + arcPath)
    return