def make_single_extension(fnlist, newfnlist):
    """Converts fits images to the single-extension format that is more
    compatible with IRAF, CFITSIO, etc. Does this via the pysalt task
    'salt2iraf'.

    Inputs:
    fnlist -> List of strings, each one the location of a multi-extension
                  image
    newfnlist -> List of strings, locations for the new single-extension
                     images

    """

    # Open various iraf packages
    iraf.pysalt(_doprint=0)
    iraf.saltred(_doprint=0)

    # Run salt2iraf on each image
    for i in range(len(fnlist)):
        iraf.salt2iraf(images=fnlist[i], outimages=newfnlist[i], outpref="")

    return
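# Example usage (a minimal sketch, not part of the pipeline itself): the
# "mbxgpP*.fits" pattern and the "s" output prefix below are illustrative
# assumptions, not conventions taken from this module.
#
#     from glob import glob
#     from os.path import split
#     fnlist = sorted(glob("mbxgpP*.fits"))
#     newfnlist = ["s" + split(fn)[1] for fn in fnlist]
#     make_single_extension(fnlist, newfnlist)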
global iraf
from pyraf import iraf
import numpy as np
import pyfits
from glob import glob
import os

iraf.pysalt()
iraf.saltspec()
iraf.saltred()
iraf.set(clobber='YES')

iraf.noao()
iraf.twodspec()
iraf.longslit()


def tofits(filename, data, hdr=None, clobber=False):
    """simple pyfits wrapper to make saving fits files easier."""
    from pyfits import PrimaryHDU, HDUList
    hdu = PrimaryHDU(data)
    if hdr is not None:
        hdu.header = hdr
    hdulist = HDUList([hdu])
    hdulist.writeto(filename, clobber=clobber, output_verify='ignore')


def get_ims(fs, imtype):
    imtypekeys = {'sci': 'OBJECT', 'arc': 'ARC', 'flat': 'FLAT'}
    ims = []
    grangles = []
    for f in fs:
        # The loop body was truncated in the original; the lines below are a
        # plausible completion (an assumption): keep images whose OBSTYPE
        # header matches the requested type and record their grating angles.
        if pyfits.getval(f, 'OBSTYPE') == imtypekeys[imtype]:
            ims.append(f)
            grangles.append(pyfits.getval(f, 'GR-ANGLE'))
    return np.array(ims), np.array(grangles)
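# Example usage (a minimal sketch; the 'pmbxgp*.fits' pattern is an
# illustrative assumption): split a directory of reduced frames into
# science and arc lists, keyed on the OBSTYPE header.
#
#     fs = glob('pmbxgp*.fits')
#     scifs, scigas = get_ims(fs, 'sci')
#     arcfs, arcgas = get_ims(fs, 'arc')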
def load_modules():
    # Define a function to load all of the modules so that they don't import
    # unless we need them
    global iraf
    from pyraf import iraf
    iraf.pysalt()
    iraf.saltspec()
    iraf.saltred()
    iraf.set(clobber='YES')

    global sys
    import sys
    global os
    import os
    global shutil
    import shutil
    global glob
    from glob import glob
    global pyfits
    import pyfits
    global np
    import numpy as np
    global lacosmicx
    import lacosmicx
    global interp
    from scipy import interp
    global signal
    from scipy import signal
    global ndimage
    from scipy import ndimage
    global interpolate
    from scipy import interpolate
    global WCS
    from astropy.wcs import WCS
    global optimize
    from scipy import optimize
    global ds9
    import pyds9 as ds9
    global GaussianProcess
    from sklearn.gaussian_process import GaussianProcess
    global pandas
    import pandas

    iraf.onedspec()
    iraf.twodspec()
    iraf.longslit()
    iraf.apextract()
    iraf.imutil()
    iraf.rvsao(motd='no')
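# Example usage (a minimal sketch): call once before any reduction step so
# that the module-level names (iraf, np, pyfits, ...) become available
# globally.
#
#     load_modules()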
def pipeline(rawdir="raw", mode="halpha"):
    """Runs successive steps of the saltfp data reduction, checking along
    the way to see if each step was successful. This is the main driver
    program of the SALT Fabry-Perot pipeline.

    Inputs:
    rawdir -> String, containing the path to the 'raw' directory. By
                  default, this is 'raw'
    mode -> Mode for velocity fitting. Currently the only option is H-Alpha
                line fitting.

    """

    # Set rest wave based on the mode called
    if mode == "halpha":
        rest_wave = 6562.81

    # Create product directory
    if isdir("product"):
        while True:
            yn = raw_input("Product directory already exists. " +
                           "Recreate it? (y/n) ")
            if "n" in yn or "N" in yn:
                break
            elif "y" in yn or "Y" in yn:
                # Confirmation
                yn = raw_input("Are you sure? This takes a while. (y/n) ")
                if (("y" in yn or "Y" in yn) and
                        not ("n" in yn or "N" in yn)):
                    rmtree("product")
                    break

    if not isdir("product"):

        # Acquire the list of filenames from the raw directory
        fnlist = sorted(listdir(rawdir))
        for i in range(len(fnlist)):
            fnlist[i] = join(rawdir, fnlist[i])

        # Run the first two steps of imred on the first image
        iraf.pysalt(_doprint=0)
        iraf.saltred(_doprint=0)
        iraf.saltprepare(fnlist[0], "temp.fits", "", createvar=False,
                         badpixelimage="", clobber=True,
                         logfile="temp.log", verbose=True)
        iraf.saltbias("temp.fits", "temp.fits", "", subover=True, trim=True,
                      subbias=False, masterbias="", median=False,
                      function="polynomial", order=5, rej_lo=3.0, rej_hi=5.0,
                      niter=10, plotover=False, turbo=False, clobber=True,
                      logfile="temp.log", verbose=True)

        # Create the bad pixel mask
        image = fits.open("temp.fits")
        for i in range(1, len(image)):
            # NaN is the only value not equal to itself, so this flags
            # non-finite pixels
            mask = image[i].data != image[i].data
            image[i].data = 1 * mask
        image.writeto("badpixmask.fits", clobber=True)
        image.close()

        # Remove temporary files
        remove("temp.fits")
        remove("temp.log")

        # Run the raw images through the first few data reduction pipeline
        # steps
        imred(fnlist, "product", bpmfile="badpixmask.fits")

        # Delete the temporary bad pixel mask
        remove("badpixmask.fits")

        # Move these raw images into the product directory
        mkdir("product")
        fnlist = sorted(listdir("."))
        for i in range(len(fnlist)):
            if "mfxgbpP" in fnlist[i] and ".fits" in fnlist[i]:
                move(fnlist[i], join("product", fnlist[i]))

    # List of files in the product directory
    fnlist = sorted(listdir("product"))
    for i in range(len(fnlist)):
        fnlist[i] = join("product", fnlist[i])

    # Manual verification of fits images and headers
    firstimage = FPImage(fnlist[0])
    verify = firstimage.verifytog
    firstimage.close()
    if verify is None:
        while True:
            prompt = "Manually verify image contents? (Recommended) (y/n) "
            yn = raw_input(prompt)
            if "n" in yn or "N" in yn:
                print ("Skipping manual verification of image contents " +
                       "(Not recommended)")
                break
            if "y" in yn or "Y" in yn:
                fnlist = verify_images(fnlist)
                break

    # Make separate lists of the different fits files
    (flatlist, list_of_objs, objlists,
     list_of_filts, filtlists) = separate_lists(fnlist)

    # Masking of pixels outside the aperture
    firstimage = FPImage(objlists[0][0])
    axcen = firstimage.axcen
    firstimage.close()
    if axcen is None:
        print "Masking pixels outside the RSS aperture..."
        axcen, aycen, arad = get_aperture(objlists[0][0])
        aperture_mask(fnlist, axcen, aycen, arad)
    else:
        print "Images have already been aperture-masked."
    # Masking bad pixels from external region file
    for objlist in objlists:
        for i in range(len(objlist)):
            if isfile(splitext(split(objlist[i])[1])[0] + ".reg"):
                print ("Adding regions from file " +
                       splitext(split(objlist[i])[1])[0] +
                       ".reg to the bad pixel mask.")
                mask_regions(objlist[i],
                             splitext(split(objlist[i])[1])[0] + ".reg")

    # Measure stellar FWHMs
    firstimage = FPImage(objlists[0][0])
    fwhm = firstimage.fwhm
    firstimage.close()
    if fwhm is None:
        dofwhm = True
    else:
        while True:
            yn = raw_input("Seeing FWHM has already been measured. " +
                           "Redo this? (y/n) ")
            if "n" in yn or "N" in yn:
                dofwhm = False
                break
            elif "y" in yn or "Y" in yn:
                dofwhm = True
                break
    if dofwhm:
        print "Measuring seeing FWHMs..."
        for objlist in objlists:
            measure_fwhm(objlist)

    # Find image centers using ghost pairs
    for i in range(len(objlists)):
        firstimage = FPImage(objlists[i][0])
        xcen = firstimage.xcen
        deghosted = firstimage.ghosttog
        firstimage.close()
        if deghosted is None:
            if xcen is None:
                ghosttog = True
            else:
                while True:
                    yn = raw_input("Optical centers already measured for " +
                                   "object " + list_of_objs[i] +
                                   ". Redo this? (y/n) ")
                    if "n" in yn or "N" in yn:
                        ghosttog = False
                        break
                    elif "y" in yn or "Y" in yn:
                        ghosttog = True
                        break
            if ghosttog:
                print ("Identifying optical centers for object " +
                       list_of_objs[i] +
                       ". This may take a while for crowded fields...")
                find_ghost_centers(objlists[i])

    # Deghost images
    for i in range(len(objlists)):
        firstimage = FPImage(objlists[i][0])
        deghosted = firstimage.ghosttog
        firstimage.close()
        if deghosted is None:
            print "Deghosting images for object " + list_of_objs[i] + "..."
            for j in range(len(objlists[i])):
                deghost(objlists[i][j])
        else:
            print ("Images for object " + list_of_objs[i] +
                   " have already been deghosted.")

    # Image Flattening
    firstimage = FPImage(objlists[0][0])
    flattog = firstimage.flattog
    firstimage.close()
    if flattog is None:
        print "Flattening images..."
        if len(flatlist) == 0:
            while True:
                print "Uh oh! No flatfield exposure found!"
                flatpath = raw_input("Enter path to external flat image: " +
                                     "(leave blank to skip flattening) ")
                if flatpath == "" or isfile(flatpath):
                    break
        else:
            combine_flat(flatlist, "flat.fits")
            flatpath = "flat.fits"
        if flatpath != "":
            notflatlist = []
            for objlist in objlists:
                notflatlist += objlist
            flatten(notflatlist, flatpath)
        else:
            print "Skipping image flattening. (Not recommended!)"
    else:
        print "Images have already been flattened."

    # Make separate directories for each object.
    # This is the first bit since 'singext' to create a new directory,
    # because this is the first point where it's really necessary to start
    # treating the images from different tracks very differently.
    for i in range(len(objlists)):
        if isdir(list_of_objs[i].replace(" ", "")):
            while True:
                yn = raw_input("A directory for object " + list_of_objs[i] +
                               " already exists. Recreate? (y/n) ")
                if "n" in yn or "N" in yn:
                    do_copy = False
                    break
                elif "y" in yn or "Y" in yn:
                    do_copy = True
                    rmtree(list_of_objs[i].replace(" ", ""))
                    break
        else:
            do_copy = True
        if do_copy:
            mkdir(list_of_objs[i].replace(" ", ""))
            for j in range(len(objlists[i])):
                copyfile(objlists[i][j],
                         join(list_of_objs[i].replace(" ", ""),
                              split(objlists[i][j])[1]))
        for j in range(len(objlists[i])):
            objlists[i][j] = join(list_of_objs[i].replace(" ", ""),
                                  split(objlists[i][j])[1])

    # Update the filter lists
    for i in range(len(filtlists)):
        for j in range(len(filtlists[i])):
            for k in range(len(objlists)):
                for l in range(len(objlists[k])):
                    if split(filtlists[i][j])[1] == split(objlists[k][l])[1]:
                        filtlists[i][j] = objlists[k][l]

    # Image alignment and normalization
    for i in range(len(objlists)):
        firstimage = FPImage(objlists[i][0])
        aligned = firstimage.phottog
        firstimage.close()
        if aligned is None:
            print ("Aligning and normalizing images for object " +
                   list_of_objs[i] + "...")
            align_norm(objlists[i])
        else:
            print ("Images for object " + list_of_objs[i] +
                   " have already been aligned and normalized.")

    # Make a median image for each object
    for i in range(len(objlists)):
        if isfile(join(list_of_objs[i].replace(" ", ""), "median.fits")):
            while True:
                yn = raw_input("Median image for object " + list_of_objs[i] +
                               " already exists. Replace it? (y/n) ")
                if "n" in yn or "N" in yn:
                    break
                elif "y" in yn or "Y" in yn:
                    make_median(objlists[i],
                                join(list_of_objs[i].replace(" ", ""),
                                     "median.fits"))
                    break
        else:
            make_median(objlists[i],
                        join(list_of_objs[i].replace(" ", ""),
                             "median.fits"))

    # Wavelength calibrations
    all_rings_list = []
    for i in range(len(list_of_filts)):
        all_rings_list = all_rings_list + filtlists[i]
    firstimage = FPImage(all_rings_list[0])
    calf = firstimage.calf
    firstimage.close()
    if calf is not None:
        while True:
            yn = raw_input("Wavelength solution already found. " +
                           "Redo it? (y/n) ")
            if "n" in yn or "N" in yn:
                break
            elif "y" in yn or "Y" in yn:
                fit_wave_soln(all_rings_list)
                break
    else:
        fit_wave_soln(all_rings_list)

    # Sky ring removal
    for i in range(len(objlists)):
        for j in range(len(objlists[i])):
            # Check to see if sky rings have already been removed
            image = FPImage(objlists[i][j])
            deringed = image.ringtog
            image.close()
            if deringed is None:
                print "Subtracting sky rings for image " + objlists[i][j]
                sub_sky_rings([objlists[i][j]],
                              [join(list_of_objs[i].replace(" ", ""),
                                    "median.fits")])
            else:
                print ("Sky ring subtraction already done for image " +
                       objlists[i][j])

    # Creation of data cube and convolution to uniform PSF
    for i in range(len(objlists)):
        if isdir(list_of_objs[i].replace(" ", "") + "_cube"):
            while True:
                yn = raw_input("A data cube for object " + list_of_objs[i] +
                               " already exists. Recreate? (y/n) ")
                if "n" in yn or "N" in yn:
                    do_create = False
                    break
                elif "y" in yn or "Y" in yn:
                    # Confirmation
                    yn = raw_input("Are you sure? This takes a while. (y/n) ")
                    if (("y" in yn or "Y" in yn) and
                            not ("n" in yn or "N" in yn)):
                        do_create = True
                        rmtree(list_of_objs[i].replace(" ", "") + "_cube")
                        break
        else:
            do_create = True
        if do_create:
            mkdir(list_of_objs[i].replace(" ", "") + "_cube")
            for j in range(len(objlists[i])):
                image = FPImage(objlists[i][j])
                fwhm = image.fwhm
                if j == 0:
                    largestfwhm = fwhm
                if fwhm > largestfwhm:
                    largestfwhm = fwhm
                image.close()
            while True:
                prompt = ("Enter desired final fwhm or leave blank to use" +
                          " default (" + str(largestfwhm) + " pix) ")
                user_fwhm = raw_input(prompt)
                if user_fwhm == "":
                    user_fwhm = largestfwhm
                    break
                else:
                    try:
                        user_fwhm = float(user_fwhm)
                    except ValueError:
                        print "That wasn't a valid number..."
                    else:
                        if user_fwhm < largestfwhm:
                            print ("Final fwhm must exceed " +
                                   str(largestfwhm) + " pixels.")
                        else:
                            break
            desired_fwhm = user_fwhm * 1.01
            for j in range(len(objlists[i])):
                make_final_image(objlists[i][j],
                                 join(list_of_objs[i].replace(" ", "") +
                                      "_cube",
                                      split(objlists[i][j])[1]),
                                 desired_fwhm,
                                 clobber=True)

    # Get final lists for the velocity map fitting for each object
    final_lists = []
    for i in range(len(list_of_objs)):
        final_lists.append([])
        for j in range(len(objlists[i])):
            final_lists[i].append(join(list_of_objs[i].replace(" ", "") +
                                       "_cube",
                                       split(objlists[i][j])[1]))

    # Shift to solar velocity frame
    for i in range(len(list_of_objs)):
        firstimage = FPImage(final_lists[i][0])
        velshift = firstimage.solarvel
        firstimage.close()
        if velshift is None:
            print ("Performing solar velocity shift for object " +
                   list_of_objs[i] + "...")
            solar_velocity_shift(final_lists[i], rest_wave)
        else:
            print ("Solar velocity shift for object " +
                   list_of_objs[i] + " already done.")

    if not do_velmap:
        sys.exit("Velocity map not made - Voigt-fitting software not found.")

    # Velocity map fitting
    for i in range(len(list_of_objs)):
        if isfile(join(list_of_objs[i].replace(" ", "") + "_cube",
                       "velocity.fits")):
            while True:
                yn = raw_input("Velocity map already fitted for object " +
                               list_of_objs[i] + ". Redo this? (y/n) ")
                if "n" in yn or "N" in yn:
                    domap = False
                    break
                elif "y" in yn or "Y" in yn:
                    # Confirmation
                    yn = raw_input("Are you sure? This takes a while. (y/n) ")
                    if (("y" in yn or "Y" in yn) and
                            not ("n" in yn or "N" in yn)):
                        domap = True
                        break
        else:
            domap = True
        if domap:
            print "Fitting velocity map for object " + list_of_objs[i] + "..."
            if mode == "halpha":
                fit_velmap_ha_n2_mode(final_lists[i],
                                      list_of_objs[i].replace(" ", "") +
                                      "_cube",
                                      clobber=True)

    # Clean velocity map
    for i in range(len(list_of_objs)):
        make_clean_map(list_of_objs[i].replace(" ", "") + "_cube",
                       clobber=True)
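# Example invocation (a minimal sketch): run from the working directory that
# contains the 'raw' subdirectory of SALT Fabry-Perot images; each step
# prompts before redoing work that has already been done.
#
#     pipeline(rawdir="raw", mode="halpha")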