def bfixpix(image_file, mask_file, outsuffix='_f', msksuffix='_s'):
    """Replace bad pixels with the local 3x3 median of the image.

    Writes two new FITS files next to the input:
      * <root><outsuffix>.fits -- the image with every bad pixel replaced
        by its 3x3 median-filtered value.
      * <root><msksuffix>.fits -- the "significance" image: the
        (image - median) residual at bad pixels, 0 elsewhere.

    Inputs
    ---------
    image_file : string
        input image file to fix bad pixels on
    mask_file : string
        mask file (0 == good pixels, >0 == bad pixels)
    outsuffix : string
        suffix for fixed image. default = '_f'
    msksuffix : string
        suffix for bad pixels significance mask. default = '_s'
    """
    outf = image_file.replace('.fits', outsuffix + '.fits')
    outm = image_file.replace('.fits', msksuffix + '.fits')
    # Remove stale outputs so fits.writeto() does not fail on clobber.
    util.rmall([outf, outm])
    print("bfixpix: {0} -> {1}".format(image_file, outf))

    # fetch the image, fetch the mask
    img, hdr = fits.getdata(image_file, header=True)
    msk = fits.getdata(mask_file)

    # median the image
    medimg = ndimage.median_filter(img, 3, mode='nearest')

    # generate the pixel files
    outf_img = np.where(msk == 0, img, medimg)
    # BUGFIX: the mask is documented as ">0 == bad", but the significance
    # image previously tested msk == 1, silently dropping any other
    # nonzero mask value. Use msk > 0 to mirror the msk == 0 test above.
    outm_img = np.where(msk > 0, (img - medimg), 0)

    fits.writeto(outf, outf_img, hdr)
    fits.writeto(outm, outm_img, hdr)
def darPlusDistortion(inputFits, outputRoot, xgeoim=None, ygeoim=None):
    """
    Create lookup tables (stored as FITS files) that can be
    used to correct DAR. Optionally, the shifts due to DAR
    can be added to existing NIRC2 distortion lookup tables
    if the xgeoim/ygeoim input parameters are set.

    Inputs:
    inputFits - a NIRC2 image for which to determine the DAR correction
    outputRoot - the root name for the output. This will be used as the
        root name of two new images with names, <outputRoot>_x.fits and
        <outputRoot>_y.fits.

    Optional Inputs:
    xgeoim/ygeoim - FITS images used in Drizzle distortion correction
        (lookup tables) will be modified to incorporate the DAR
        correction. The order of the correction is 1. distortion, 2. DAR.

    Returns a tuple (xout, yout) of the two output file names.
    """
    # Get the size of the image and the half-points.
    # BUGFIX: keep the axis sizes as ints -- they are used as an array
    # shape below, and numpy rejects float shape values.
    hdr = pyfits.getheader(inputFits)
    imgsizeX = int(hdr['NAXIS1'])
    imgsizeY = int(hdr['NAXIS2'])
    halfX = round(imgsizeX / 2.0)
    halfY = round(imgsizeY / 2.0)

    # First get the DAR coefficients (parallactic angle, linear and
    # quadratic terms).
    (pa, darCoeffL, darCoeffQ) = nirc2dar(inputFits)
    #(a, b) = nirc2darPoly(inputFits)

    # Create two image-sized arrays (or read in existing ones) for the
    # X and Y lookup tables. Use identity (`is None`) rather than `==`.
    if (xgeoim is None) or (xgeoim == ''):
        x = np.zeros((imgsizeY, imgsizeX), dtype=float)
    else:
        x = pyfits.getdata(xgeoim)
    if (ygeoim is None) or (ygeoim == ''):
        y = np.zeros((imgsizeY, imgsizeX), dtype=float)
    else:
        y = pyfits.getdata(ygeoim)

    # Get proper header info. Renamed from `fits` to avoid shadowing the
    # astropy.io.fits-style module name used elsewhere in this file.
    hduList = pyfits.open(inputFits)

    # Pixel coordinates relative to the image center.
    axisX = np.arange(imgsizeX, dtype=float) - halfX
    axisY = np.arange(imgsizeY, dtype=float) - halfY
    xcoo2d, ycoo2d = np.meshgrid(axisX, axisY)

    # Apply the existing distortion lookup tables first.
    xnew1 = xcoo2d + x
    ynew1 = ycoo2d + y

    # Rotate coordinates clockwise by PA so that zenith is along +ynew2
    # PA = parallactic angle (angle from +y to zenith going CCW)
    sina = math.sin(pa)
    cosa = math.cos(pa)
    xnew2 = xnew1 * cosa + ynew1 * sina
    ynew2 = -xnew1 * sina + ynew1 * cosa

    # Apply DAR correction along the y axis
    xnew3 = xnew2
    ynew3 = ynew2 * (1 + darCoeffL) + ynew2 * np.abs(ynew2) * darCoeffQ

    # Rotate coordinates counter-clockwise by PA back to original
    xnew4 = xnew3 * cosa - ynew3 * sina
    ynew4 = xnew3 * sina + ynew3 * cosa

    #xnew2 = a[0] + a[1]*xnew1 + a[2]*ynew1 + \
    #    a[3]*xnew1**2 + a[4]*xnew1*ynew1 + a[5]*ynew1**2
    #ynew2 = b[0] + b[1]*xnew1 + b[2]*ynew1 + \
    #    b[3]*xnew1**2 + b[4]*xnew1*ynew1 + b[5]*ynew1**2

    # Net shift (distortion + DAR) relative to the raw pixel grid.
    x = xnew4 - xcoo2d
    y = ynew4 - ycoo2d

    xout = outputRoot + '_x.fits'
    yout = outputRoot + '_y.fits'
    util.rmall([xout, yout])

    # Write both lookup tables with the input image's header.
    hduList[0].data = x
    hduList[0].writeto(xout, output_verify='silentfix')
    hduList[0].data = y
    hduList[0].writeto(yout, output_verify='silentfix')

    return (xout, yout)
def red_dir(directory, clean_dir, sky_key='sky', flat_key='Domeflat', sci_keys=['Wd 2 pos 1', 'Wd 2 pos 2', 'Wd 2 pos 3', 'Wd 2 pos 4'], frame_list=None): ''' Note, must be ran from pyraf interavtive terminal perform reduction on directory given directory must be full path name sky_key is header keywaord for sky frames dome_key is header keyword for domes sci_coadds is the minimum number of coadds required for an image to be considered a science image ''' #os.chdir(directory) print 'Reduction being performed in ', directory if frame_list == None: frame_list = glob.glob(directory + '*.fits') dir_ap = '' for i in range(len(frame_list)): frame_list[i] = frame_list[i].replace('.fits', '') else: dir_ap = directory #go through the fits files and make 3 lists, one of skies, one of domes one of science frames sci_f = open('obj.lis', 'w') dome_f = open('flat.lis', 'w') all_f = open('all.lis', 'w') sky_f = open('sky.lis', 'w') dome_list = [] sci_l = [] for i in frame_list: #import pdb; pdb.set_trace() print >> all_f, dir_ap + i + '.fits' head = fits.getheader(dir_ap + i + '.fits') if head['OBJECT'] == sky_key: print >> sky_f, dir_ap + 'g' + i + '.fits' elif head['OBJECT'] == flat_key: print >> dome_f, dir_ap + 'g' + i + '.fits' dome_list.append(i) else: for j in sci_keys: if head['OBJECT'].replace(' ', '') == j: print >> sci_f, dir_ap + 'g' + i + '.fits' sci_l.append(i + '.fits') if len(sci_l) == 0: print 'No science frames found in directory' import pdb pdb.set_trace() sky_f.close() sci_f.close() dome_f.close() all_f.close() from pyraf.iraf import gemini from pyraf.iraf import gsaoi from pyraf import iraf gemini.unlearn() gsaoi.unlearn() #raw_dir = util.getcwd() #prep_dir = raw_dir+'g' #print raw_dir util.rmall(['gaprep.log']) print 'Arguements for gaprepare', '@all.lis', directory + 'g' gsaoi.gaprepare('@all.lis', outpref=directory + 'g', fl_vardq='yes', logfile='gaprep.log') gsaoi.gaflat('@flat.lis', outsufx='flat', fl_vardq='yes') flat_name = 'g' + dome_list[0] + 
"_flat.fits" shutil.move('g' + dome_list[0] + "_flat.fits", directory + 'g' + dome_list[0] + "_flat.fits") #print flat_name #gsaoi.gareduce('@sky.lis', rawpath=directory, gaprep_pref = directory+'g',calpath=directory, fl_flat='yes', flatimg=flat_name) gsaoi.gasky('@sky.lis', outimages='sky.fits', fl_vardq='yes', fl_dqprop='yes', flatimg=directory + flat_name) shutil.move('sky.fits', directory + 'sky.fits') gsaoi.gareduce('@obj.lis', fl_vardq='yes', fl_dqprop='yes', fl_dark='no', calpath=directory, fl_sky='yes', skyimg=directory + 'sky.fits', fl_flat='yes', flatimg=flat_name) #util.rmall(['obj.lis','sky.lis','flat.lis', 'all.lis']) for i in sci_l: for k in range(4): iraf.imcopy('rg' + i + '[' + str(k + 1) + '][inherit+]', 'rg' + i.replace('.fits', str(k + 1) + '.fits')) #add in line to reflect the x-axis --- note I do not correct the WCS at all!!!! iraf.imcopy('rg' + i.replace('.fits', str(k + 1) + '.fits[-*,*]'), 'rg' + i.replace('.fits', str(k + 1) + '.fits')) shutil.move( 'rg' + i.replace('.fits', str(k + 1) + '.fits'), clean_dir + 'rg' + i.replace('.fits', '_' + str(k + 1) + '.fits')) shutil.copy('rg' + i, directory + 'rg' + i) os.remove('rg' + i)
def __del__(self):
    # Destructor: remove this object's temporary directory when the
    # object is garbage collected.
    # NOTE(review): other code in this file calls util.rmall() with a
    # *list* of paths -- confirm that this rmall accepts a bare string.
    rmall(self.tempdir)
def darPlusDistortion(inputFits, outputRoot, xgeoim=None, ygeoim=None):
    """
    Create lookup tables (stored as FITS files) that can be
    used to correct DAR. Optionally, the shifts due to DAR
    can be added to existing NIRC2 distortion lookup tables
    if the xgeoim/ygeoim input parameters are set.

    Inputs:
    inputFits - a NIRC2 image for which to determine the DAR correction
    outputRoot - the root name for the output. This will be used as the
        root name of two new images with names, <outputRoot>_x.fits and
        <outputRoot>_y.fits.

    Optional Inputs:
    xgeoim/ygeoim - FITS images used in Drizzle distortion correction
        (lookup tables) will be modified to incorporate the DAR
        correction. The order of the correction is 1. distortion, 2. DAR.

    Returns a tuple (xout, yout) of the two output file names.
    """
    # Get the size of the image and the half-points
    hdr = pyfits.getheader(inputFits)
    imgsizeX = float(hdr['NAXIS1'])
    imgsizeY = float(hdr['NAXIS2'])
    halfX = round(imgsizeX / 2.0)
    halfY = round(imgsizeY / 2.0)

    # First get the coefficients (parallactic angle plus linear and
    # quadratic DAR terms).
    (pa, darCoeffL, darCoeffQ) = nirc2dar(inputFits)
    #(a, b) = nirc2darPoly(inputFits)

    # Create two 1024 arrays (or read in existing ones) for the
    # X and Y lookup tables.
    # NOTE(review): the shape values here are floats (NAXIS cast to
    # float above); modern numpy rejects float shapes -- confirm the
    # numpy version in use accepts this.
    if ((xgeoim == None) or (xgeoim == '')):
        x = np.zeros((imgsizeY, imgsizeX), dtype=float)
    else:
        x = pyfits.getdata(xgeoim)
    if ((ygeoim == None) or (ygeoim == '')):
        y = np.zeros((imgsizeY, imgsizeX), dtype=float)
    else:
        y = pyfits.getdata(ygeoim)

    # Get proper header info. (Local name `fits` shadows the usual
    # astropy module alias used elsewhere in this file.)
    fits = pyfits.open(inputFits)

    # Pixel coordinates relative to the image center.
    axisX = np.arange(imgsizeX, dtype=float) - halfX
    axisY = np.arange(imgsizeY, dtype=float) - halfY
    xcoo2d, ycoo2d = np.meshgrid(axisX, axisY)

    # Apply the existing distortion lookup tables first.
    xnew1 = xcoo2d + x
    ynew1 = ycoo2d + y

    # Rotate coordinates clockwise by PA so that zenith is along +ynew2
    # PA = parallactic angle (angle from +y to zenith going CCW)
    sina = math.sin(pa)
    cosa = math.cos(pa)
    xnew2 = xnew1 * cosa + ynew1 * sina
    ynew2 = -xnew1 * sina + ynew1 * cosa

    # Apply DAR correction along the y axis
    xnew3 = xnew2
    ynew3 = ynew2 * (1 + darCoeffL) + ynew2 * np.abs(ynew2) * darCoeffQ

    # Rotate coordinates counter-clockwise by PA back to original
    xnew4 = xnew3 * cosa - ynew3 * sina
    ynew4 = xnew3 * sina + ynew3 * cosa

    #xnew2 = a[0] + a[1]*xnew1 + a[2]*ynew1 + \
    #    a[3]*xnew1**2 + a[4]*xnew1*ynew1 + a[5]*ynew1**2
    #ynew2 = b[0] + b[1]*xnew1 + b[2]*ynew1 + \
    #    b[3]*xnew1**2 + b[4]*xnew1*ynew1 + b[5]*ynew1**2

    # Net shift (distortion + DAR) relative to the raw pixel grid.
    x = xnew4 - xcoo2d
    y = ynew4 - ycoo2d

    xout = outputRoot + '_x.fits'
    yout = outputRoot + '_y.fits'
    util.rmall([xout, yout])

    # Write both lookup tables, reusing the input image's header.
    fits[0].data = x
    fits[0].writeto(xout, output_verify='silentfix')
    fits[0].data = y
    fits[0].writeto(yout, output_verify='silentfix')

    return (xout, yout)
def red_dir(directory, clean_dir, sky_key='sky', flat_key='Domeflat',
            sci_keys=['Wd 2 pos 1', 'Wd 2 pos 2', 'Wd 2 pos 3', 'Wd 2 pos 4'],
            frame_list=None):
    '''
    Reduce one directory of GSAOI data with the Gemini IRAF tasks.

    Note: must be run from a PyRAF interactive terminal.

    directory -- full path of the directory containing the raw frames
    clean_dir -- destination directory for the per-extension reduced
        science frames
    sky_key -- header OBJECT value identifying sky frames
    flat_key -- header OBJECT value identifying dome flats
    sci_keys -- OBJECT values identifying science frames
    frame_list -- optional explicit list of frame names (without the
        '.fits' suffix); if None, every *.fits file in `directory` is used
    '''
    #os.chdir(directory)
    print 'Reduction being performed in ', directory
    if frame_list == None:
        frame_list = glob.glob(directory + '*.fits')
        dir_ap = ''
        for i in range(len(frame_list)):
            frame_list[i] = frame_list[i].replace('.fits', '')
    else:
        dir_ap = directory

    # Go through the fits files and make 4 lists: skies, dome flats,
    # science frames, and everything.
    sci_f = open('obj.lis', 'w')
    dome_f = open('flat.lis', 'w')
    all_f = open('all.lis', 'w')
    sky_f = open('sky.lis', 'w')
    dome_list = []
    sci_l = []
    for i in frame_list:
        #import pdb; pdb.set_trace()
        print >> all_f, dir_ap + i + '.fits'
        head = fits.getheader(dir_ap + i + '.fits')
        if head['OBJECT'] == sky_key:
            # The sky/flat/sci lists reference the 'g'-prefixed
            # (gaprepare'd) products created below.
            print >> sky_f, dir_ap + 'g' + i + '.fits'
        elif head['OBJECT'] == flat_key:
            print >> dome_f, dir_ap + 'g' + i + '.fits'
            dome_list.append(i)
        else:
            for j in sci_keys:
                # NOTE(review): only the header value has its spaces
                # stripped, while the default sci_keys contain spaces --
                # these can never be equal; confirm whether `j` should
                # also be space-stripped here.
                if head['OBJECT'].replace(' ', '') == j:
                    print >> sci_f, dir_ap + 'g' + i + '.fits'
                    sci_l.append(i + '.fits')
    if len(sci_l) == 0:
        print 'No science frames found in directory'
        import pdb; pdb.set_trace()
    sky_f.close()
    sci_f.close()
    dome_f.close()
    all_f.close()

    from pyraf.iraf import gemini
    from pyraf.iraf import gsaoi
    from pyraf import iraf
    gemini.unlearn()
    gsaoi.unlearn()
    #raw_dir = util.getcwd()
    #prep_dir = raw_dir+'g'
    #print raw_dir
    util.rmall(['gaprep.log'])
    print 'Arguements for gaprepare', '@all.lis', directory + 'g'
    # Prepare the raw frames (adds VAR/DQ planes), then build the flat.
    gsaoi.gaprepare('@all.lis', outpref=directory + 'g', fl_vardq='yes',
                    logfile='gaprep.log')
    gsaoi.gaflat('@flat.lis', outsufx='flat', fl_vardq='yes')
    flat_name = 'g' + dome_list[0] + "_flat.fits"
    shutil.move('g' + dome_list[0] + "_flat.fits",
                directory + 'g' + dome_list[0] + "_flat.fits")
    #print flat_name
    #gsaoi.gareduce('@sky.lis', rawpath=directory, gaprep_pref = directory+'g',calpath=directory, fl_flat='yes', flatimg=flat_name)
    # Combine the sky frames, then reduce the science frames.
    gsaoi.gasky('@sky.lis', outimages='sky.fits', fl_vardq='yes',
                fl_dqprop='yes', flatimg=directory + flat_name)
    shutil.move('sky.fits', directory + 'sky.fits')
    # NOTE(review): gasky receives directory+flat_name but gareduce only
    # flat_name, even though the flat was moved into `directory` above --
    # confirm gareduce resolves the flat through calpath.
    gsaoi.gareduce('@obj.lis', fl_vardq='yes', fl_dqprop='yes', fl_dark='no',
                   calpath=directory, fl_sky='yes',
                   skyimg=directory + 'sky.fits', fl_flat='yes',
                   flatimg=flat_name)
    #util.rmall(['obj.lis','sky.lis','flat.lis', 'all.lis'])

    # Split each reduced science frame into its 4 detector extensions,
    # reflect each about the x-axis, and move them to clean_dir.
    for i in sci_l:
        for k in range(4):
            iraf.imcopy('rg' + i + '[' + str(k + 1) + '][inherit+]',
                        'rg' + i.replace('.fits', str(k + 1) + '.fits'))
            #add in line to reflect the x-axis --- note I do not correct the WCS at all!!!!
            iraf.imcopy('rg' + i.replace('.fits', str(k + 1) + '.fits[-*,*]'),
                        'rg' + i.replace('.fits', str(k + 1) + '.fits'))
            shutil.move('rg' + i.replace('.fits', str(k + 1) + '.fits'),
                        clean_dir + 'rg' + i.replace('.fits',
                                                     '_' + str(k + 1) + '.fits'))
        shutil.copy('rg' + i, directory + 'rg' + i)
        os.remove('rg' + i)
def makesky_lp(files, nite, wave, number=3, rejectHsigma=None): """Make L' skies by carefully treating the ROTPPOSN angle of the K-mirror. Uses 3 skies combined (set by number keyword).""" # Start out in something like '06maylgs1/reduce/kp/' waveDir = os.getcwd() + '/' redDir = util.trimdir(os.path.abspath(waveDir + '../') + '/') rootDir = util.trimdir(os.path.abspath(redDir + '../') + '/') skyDir = waveDir + 'sky_' + nite + '/' rawDir = rootDir + 'raw/' util.mkdir(skyDir) raw = [rawDir + 'n' + str(i).zfill(4) for i in files] skies = [skyDir + 'n' + str(i).zfill(4) for i in files] _rawlis = skyDir + 'raw.lis' _nlis = skyDir + 'n.lis' _skyRot = skyDir + 'skyRot.txt' _txt = skyDir + 'rotpposn.txt' _out = skyDir + 'sky' _log = _out + '.log' util.rmall([_rawlis, _nlis, _skyRot, _txt, _out, _log]) util.rmall([sky + '.fits' for sky in skies]) open(_rawlis, 'w').write('\n'.join(raw) + '\n') open(_nlis, 'w').write('\n'.join(skies) + '\n') print 'makesky_lp: Getting raw files' ir.imcopy('@' + _rawlis, '@' + _nlis, verbose='no') ir.hselect('@' + _nlis, "$I,ROTPPOSN", 'yes', Stdout=_skyRot) # Read in the list of files and rotation angles rotTab = asciidata.open(_skyRot) files = rotTab[0].tonumpy() angles = rotTab[1].tonumpy() # Fix angles to be between -180 and 180 angles[angles > 180] -= 360.0 angles[angles < -180] += 360.0 sidx = np.argsort(angles) # Make sorted numarrays angles = angles[sidx] files = files[sidx] f_log = open(_log, 'w') f_txt = open(_txt, 'w') # Skip the first and last since we are going to # average every NN files. print 'makesky_lp: Combining to make skies.' 
startIdx = number / 2 stopIdx = len(sidx) - (number / 2) for i in range(startIdx, stopIdx): sky = 'sky%.1f' % (angles[i]) skyFits = skyDir + sky + '.fits' util.rmall([skyFits]) # Take NN images start = i - (number / 2) stop = start + number list = [file for file in files[start:stop]] short = [file for file in files[start:stop]] angleTmp = angles[start:stop] # Make short names for j in range(len(list)): tmp = (short[j]).rsplit('/', 1) short[j] = tmp[len(tmp) - 1] print '%s: %s' % (sky, " ".join(short)) f_log.write('%s:' % sky) for j in range(len(short)): f_log.write(' %s' % short[j]) for j in range(len(angleTmp)): f_log.write(' %6.1f' % angleTmp[j]) f_log.write('\n') ir.unlearn('imcombine') ir.imcombine.combine = 'median' if (rejectHsigma == None): ir.imcombine.reject = 'none' ir.imcombine.nlow = 1 ir.imcombine.nhigh = 1 else: ir.imcombine.reject = 'sigclip' ir.imcombine.lsigma = 100 ir.imcombine.hsigma = rejectHsigma ir.imcombine.zero = 'median' ir.imcombine.logfile = '' ir.imcombine(','.join(list), skyFits) ir.hedit(skyFits, 'SKYCOMB', '%s: %s' % (sky, ' '.join(short)), add='yes', show='no', verify='no') f_txt.write('%13s %8.3f\n' % (sky, angles[i])) f_txt.close() f_log.close()
def __del__(self):
    """Destructor. Cleans up as this object is destroyed."""
    # Remove this object's working directory.
    # NOTE(review): other code in this file calls util.rmall() with a
    # *list* of paths -- confirm that this rmall accepts a bare string.
    rmall(self.work_dir)
def makesky_fromsci(files, nite, wave): """Make short wavelength (not L-band or longer) skies.""" # Start out in something like '06maylgs1/reduce/kp/' waveDir = os.getcwd() + '/' redDir = util.trimdir(os.path.abspath(waveDir + '../') + '/') rootDir = util.trimdir(os.path.abspath(redDir + '../') + '/') skyDir = waveDir + 'sky_' + nite + '/' rawDir = rootDir + 'raw/' util.mkdir(skyDir) print 'sky dir: ', skyDir print 'wave dir: ', waveDir skylist = skyDir + 'skies_to_combine.lis' output = skyDir + 'sky_' + wave + '.fits' util.rmall([skylist, output]) nn = [skyDir + 'n' + str(i).zfill(4) for i in files] nsc = [skyDir + 'scale' + str(i).zfill(4) for i in files] skies = [rawDir + 'n' + str(i).zfill(4) for i in files] for ii in range(len(nn)): ir.imdelete(nn[ii]) ir.imdelete(nsc[ii]) ir.imcopy(skies[ii], nn[ii], verbose="no") # Make list for combinng. Reset the skyDir to an IRAF variable. ir.set(skydir=skyDir) f_on = open(skylist, 'w') for ii in range(len(nn)): nn_new = nn[ii].replace(skyDir, "skydir$") f_on.write(nn_new + '\n') f_on.close() # Calculate some sky statistics, but reject high (star-like) pixels sky_mean = np.zeros([len(skies)], dtype=float) sky_std = np.zeros([len(skies)], dtype=float) text = ir.imstat("@" + skylist, fields='midpt,stddev', nclip=10, lsigma=10, usigma=3, format=0, Stdout=1) for ii in range(len(nn)): fields = text[ii].split() sky_mean[ii] = float(fields[0]) sky_std[ii] = float(fields[1]) sky_mean_all = sky_mean.mean() sky_std_all = sky_std.mean() # Upper threshold above which we will ignore pixels when combining. hthreshold = sky_mean_all + 3.0 * sky_std_all ir.imdelete(output) ir.unlearn('imcombine') ir.imcombine.combine = 'median' ir.imcombine.reject = 'sigclip' ir.imcombine.mclip = 'yes' ir.imcombine.hsigma = 2 ir.imcombine.lsigma = 10 ir.imcombine.hthreshold = hthreshold ir.imcombine('@' + skylist, output)
def makesky(files, nite, wave, skyscale=1): """Make short wavelength (not L-band or longer) skies.""" # Start out in something like '06maylgs1/reduce/kp/' waveDir = os.getcwd() + '/' redDir = util.trimdir(os.path.abspath(waveDir + '../') + '/') rootDir = util.trimdir(os.path.abspath(redDir + '../') + '/') skyDir = waveDir + 'sky_' + nite + '/' rawDir = rootDir + 'raw/' util.mkdir(skyDir) print 'sky dir: ', skyDir print 'wave dir: ', waveDir skylist = skyDir + 'skies_to_combine.lis' output = skyDir + 'sky_' + wave + '.fits' util.rmall([skylist, output]) nn = [skyDir + 'n' + str(i).zfill(4) for i in files] nsc = [skyDir + 'scale' + str(i).zfill(4) for i in files] skies = [rawDir + 'n' + str(i).zfill(4) for i in files] for ii in range(len(nn)): ir.imdelete(nn[ii]) ir.imdelete(nsc[ii]) ir.imcopy(skies[ii], nn[ii], verbose="no") # scale skies to common median if skyscale: _skylog = skyDir + 'sky_scale.log' util.rmall([_skylog]) f_skylog = open(_skylog, 'w') sky_mean = np.zeros([len(skies)], dtype=float) for i in range(len(skies)): text = ir.imstat(nn[i], fields='mean', nclip=4, lsigma=10, usigma=10, format=0, Stdout=1) sky_mean[i] = float(text[0]) sky_all = sky_mean.mean() sky_scale = sky_all / sky_mean for i in range(len(skies)): ir.imarith(nn[i], '*', sky_scale[i], nsc[i]) skyf = nn[i].split('/') print('%s skymean=%10.2f skyscale=%10.2f' % (skyf[len(skyf) - 1], sky_mean[i], sky_scale[i])) f_skylog.write('%s %10.2f %10.2f\n' % (nn[i], sky_mean[i], sky_scale[i])) # Make list for combinng f_on = open(skylist, 'w') f_on.write('\n'.join(nsc) + '\n') f_on.close() #skylist = skyDir + 'scale????.fits' f_skylog.close() else: # Make list for combinng f_on = open(skylist, 'w') f_on.write('\n'.join(nn) + '\n') f_on.close() #skylist = skyDir + 'n????.fits' ir.imdelete(output) ir.unlearn('imcombine') ir.imcombine.combine = 'median' ir.imcombine.reject = 'none' ir.imcombine.nlow = 1 ir.imcombine.nhigh = 1 ir.imcombine('@' + skylist, output)
def makesky_lp2(files, nite, wave): """Make L' skies by carefully treating the ROTPPOSN angle of the K-mirror. Uses only 2 skies combined.""" # Start out in something like '06maylgs1/reduce/kp/' waveDir = os.getcwd() + '/' redDir = util.trimdir(os.path.abspath(waveDir + '../') + '/') rootDir = util.trimdir(os.path.abspath(redDir + '../') + '/') skyDir = waveDir + 'sky_' + nite + '/' rawDir = rootDir + 'raw/' util.mkdir(skyDir) raw = [rawDir + 'n' + str(i).zfill(4) for i in files] skies = [skyDir + 'n' + str(i).zfill(4) for i in files] _rawlis = skyDir + 'raw.lis' _nlis = skyDir + 'n.lis' _skyRot = skyDir + 'skyRot.txt' _txt = skyDir + 'rotpposn.txt' _out = skyDir + 'sky' _log = _out + '.log' util.rmall([_rawlis, _nlis, _skyRot, _txt, _out, _log]) util.rmall([sky + '.fits' for sky in skies]) open(_rawlis, 'w').write('\n'.join(raw) + '\n') open(_nlis, 'w').write('\n'.join(skies) + '\n') print 'makesky_lp: Getting raw files' ir.imcopy('@' + _rawlis, '@' + _nlis, verbose='no') ir.hselect('@' + _nlis, "$I,ROTPPOSN", 'yes', Stdout=_skyRot) # Read in the list of files and rotation angles rotTab = asciidata.open(_skyRot) files = rotTab[0].tonumpy() angles = rotTab[1].tonumpy() # Fix angles to be between -180 and 180 angles[angles > 180] -= 360.0 angles[angles < -180] += 360.0 sidx = np.argsort(angles) # Make sorted numarrays angles = angles[sidx] files = files[sidx] f_log = open(_log, 'w') f_txt = open(_txt, 'w') # Skip the first and last since we are going to # average every 3 files. print 'makesky_lp: Combining to make skies.' for i in range(1, len(sidx)): angav = (angles[i] + angles[i - 1]) / 2. 
sky = 'sky%.1f' % (angav) skyFits = skyDir + sky + '.fits' util.rmall([skyFits]) # Average 2 images list = [file for file in files[i - 1:i + 1]] short = [file for file in files[i - 1:i + 1]] # Make short names for j in range(len(list)): tmp = (short[j]).rsplit('/', 1) short[j] = tmp[len(tmp) - 1] print '%s: %s %s' % (sky, short[0], short[1]) f_log.write('%s: %s %s %6.1f %6.1f\n' % (sky, short[0], short[1], angles[i - 1], angles[i])) ir.unlearn('imcombine') ir.imcombine.combine = 'average' ir.imcombine.reject = 'none' ir.imcombine.nlow = 1 ir.imcombine.nhigh = 1 ir.imcombine.logfile = '' ir.imcombine(list[1] + ',' + list[0], skyFits) ir.hedit(skyFits, 'SKYCOMB', '%s: %s %s' % (sky, short[0], short[1]), add='yes', show='no', verify='no') f_txt.write('%13s %8.3f\n' % (sky, angav)) f_txt.close() f_log.close()
def makesky_lp(files, nite, wave, number=3, rejectHsigma=None):
    """Make L' skies by carefully treating the ROTPPOSN angle
    of the K-mirror. Uses 3 skies combined (set by number keyword).

    files -- integer frame numbers of the raw sky exposures
    nite -- night label; output goes into sky_<nite>/ under the cwd
    wave -- wavelength band label (not referenced in this routine)
    number -- how many angle-adjacent frames to combine per sky
    rejectHsigma -- if None, median combine with no rejection; otherwise
        use sigclip rejection with this high-side sigma
    """
    # Start out in something like '06maylgs1/reduce/kp/'
    waveDir = os.getcwd() + '/'
    redDir = util.trimdir(os.path.abspath(waveDir + '../') + '/')
    rootDir = util.trimdir(os.path.abspath(redDir + '../') + '/')
    skyDir = waveDir + 'sky_' + nite + '/'
    rawDir = rootDir + 'raw/'
    util.mkdir(skyDir)

    raw = [rawDir + 'n' + str(i).zfill(4) for i in files]
    skies = [skyDir + 'n' + str(i).zfill(4) for i in files]

    _rawlis = skyDir + 'raw.lis'
    _nlis = skyDir + 'n.lis'
    _skyRot = skyDir + 'skyRot.txt'
    _txt = skyDir + 'rotpposn.txt'
    _out = skyDir + 'sky'
    _log = _out + '.log'
    util.rmall([_rawlis, _nlis, _skyRot, _txt, _out, _log])
    util.rmall([sky + '.fits' for sky in skies])

    open(_rawlis, 'w').write('\n'.join(raw) + '\n')
    open(_nlis, 'w').write('\n'.join(skies) + '\n')

    print 'makesky_lp: Getting raw files'
    ir.imcopy('@' + _rawlis, '@' + _nlis, verbose='no')
    # Record each file name with its K-mirror angle (ROTPPOSN).
    ir.hselect('@' + _nlis, "$I,ROTPPOSN", 'yes', Stdout=_skyRot)

    # Read in the list of files and rotation angles
    rotTab = asciidata.open(_skyRot)
    files = rotTab[0].tonumpy()
    angles = rotTab[1].tonumpy()

    # Fix angles to be between -180 and 180
    angles[angles > 180] -= 360.0
    angles[angles < -180] += 360.0

    sidx = np.argsort(angles)

    # Make sorted numarrays
    angles = angles[sidx]
    files = files[sidx]

    f_log = open(_log, 'w')
    f_txt = open(_txt, 'w')

    # Skip the first and last since we are going to
    # average every NN files.
    print 'makesky_lp: Combining to make skies.'
    # NOTE(review): Python 2 integer division is relied on here; under
    # Python 3 `number / 2` would produce a float index.
    startIdx = number / 2
    stopIdx = len(sidx) - (number / 2)
    for i in range(startIdx, stopIdx):
        sky = 'sky%.1f' % (angles[i])
        skyFits = skyDir + sky + '.fits'
        util.rmall([skyFits])

        # Take NN images centered (in angle order) on index i.
        start = i - (number / 2)
        stop = start + number
        # NOTE(review): `list` and `file` shadow Python builtins here.
        list = [file for file in files[start:stop]]
        short = [file for file in files[start:stop]]
        angleTmp = angles[start:stop]

        # Make short names
        for j in range(len(list)):
            tmp = (short[j]).rsplit('/', 1)
            short[j] = tmp[len(tmp) - 1]

        print '%s: %s' % (sky, " ".join(short))
        f_log.write('%s:' % sky)
        for j in range(len(short)):
            f_log.write(' %s' % short[j])
        for j in range(len(angleTmp)):
            f_log.write(' %6.1f' % angleTmp[j])
        f_log.write('\n')

        ir.unlearn('imcombine')
        ir.imcombine.combine = 'median'
        if (rejectHsigma == None):
            ir.imcombine.reject = 'none'
            ir.imcombine.nlow = 1
            ir.imcombine.nhigh = 1
        else:
            ir.imcombine.reject = 'sigclip'
            ir.imcombine.lsigma = 100
            ir.imcombine.hsigma = rejectHsigma
            ir.imcombine.zero = 'median'
        ir.imcombine.logfile = ''
        ir.imcombine(','.join(list), skyFits)

        # Record which frames went into this sky in its header.
        ir.hedit(skyFits, 'SKYCOMB', '%s: %s' % (sky, ' '.join(short)),
                 add='yes', show='no', verify='no')

        f_txt.write('%13s %8.3f\n' % (sky, angles[i]))
    f_txt.close()
    f_log.close()
def makesky(files, nite, wave, skyscale=1): """Make short wavelength (not L-band or longer) skies.""" # Start out in something like '06maylgs1/reduce/kp/' waveDir = os.getcwd() + '/' redDir = util.trimdir(os.path.abspath(waveDir + '../') + '/') rootDir = util.trimdir(os.path.abspath(redDir + '../') + '/') skyDir = waveDir + 'sky_' + nite + '/' rawDir = rootDir + 'raw/' util.mkdir(skyDir) print 'sky dir: ',skyDir print 'wave dir: ',waveDir skylist = skyDir + 'skies_to_combine.lis' output = skyDir + 'sky_' + wave + '.fits' util.rmall([skylist, output]) nn = [skyDir + 'n' + str(i).zfill(4) for i in files] nsc = [skyDir + 'scale' + str(i).zfill(4) for i in files] skies = [rawDir + 'n' + str(i).zfill(4) for i in files] for ii in range(len(nn)): ir.imdelete(nn[ii]) ir.imdelete(nsc[ii]) ir.imcopy(skies[ii], nn[ii], verbose="no") # scale skies to common median if skyscale: _skylog = skyDir + 'sky_scale.log' util.rmall([_skylog]) f_skylog = open(_skylog, 'w') sky_mean = np.zeros([len(skies)], dtype=float) for i in range(len(skies)): text = ir.imstat(nn[i], fields='mean', nclip=4, lsigma=10, usigma=10, format=0, Stdout=1) sky_mean[i] = float(text[0]) sky_all = sky_mean.mean() sky_scale = sky_all/sky_mean for i in range(len(skies)): ir.imarith(nn[i], '*', sky_scale[i], nsc[i]) skyf = nn[i].split('/') print('%s skymean=%10.2f skyscale=%10.2f' % (skyf[len(skyf)-1], sky_mean[i],sky_scale[i])) f_skylog.write('%s %10.2f %10.2f\n' % (nn[i], sky_mean[i], sky_scale[i])) # Make list for combinng f_on = open(skylist, 'w') f_on.write('\n'.join(nsc) + '\n') f_on.close() #skylist = skyDir + 'scale????.fits' f_skylog.close() else: # Make list for combinng f_on = open(skylist, 'w') f_on.write('\n'.join(nn) + '\n') f_on.close() #skylist = skyDir + 'n????.fits' ir.imdelete(output) ir.unlearn('imcombine') ir.imcombine.combine = 'median' ir.imcombine.reject = 'none' ir.imcombine.nlow = 1 ir.imcombine.nhigh = 1 ir.imcombine('@' + skylist, output)
def makesky_fromsci(files, nite, wave): """Make short wavelength (not L-band or longer) skies.""" # Start out in something like '06maylgs1/reduce/kp/' waveDir = os.getcwd() + '/' redDir = util.trimdir(os.path.abspath(waveDir + '../') + '/') rootDir = util.trimdir(os.path.abspath(redDir + '../') + '/') skyDir = waveDir + 'sky_' + nite + '/' rawDir = rootDir + 'raw/' util.mkdir(skyDir) print 'sky dir: ',skyDir print 'wave dir: ',waveDir skylist = skyDir + 'skies_to_combine.lis' output = skyDir + 'sky_' + wave + '.fits' util.rmall([skylist, output]) nn = [skyDir + 'n' + str(i).zfill(4) for i in files] nsc = [skyDir + 'scale' + str(i).zfill(4) for i in files] skies = [rawDir + 'n' + str(i).zfill(4) for i in files] for ii in range(len(nn)): ir.imdelete(nn[ii]) ir.imdelete(nsc[ii]) ir.imcopy(skies[ii], nn[ii], verbose="no") # Make list for combinng. Reset the skyDir to an IRAF variable. ir.set(skydir=skyDir) f_on = open(skylist, 'w') for ii in range(len(nn)): nn_new = nn[ii].replace(skyDir, "skydir$") f_on.write(nn_new + '\n') f_on.close() # Calculate some sky statistics, but reject high (star-like) pixels sky_mean = np.zeros([len(skies)], dtype=float) sky_std = np.zeros([len(skies)], dtype=float) text = ir.imstat("@" + skylist, fields='midpt,stddev', nclip=10, lsigma=10, usigma=3, format=0, Stdout=1) for ii in range(len(nn)): fields = text[ii].split() sky_mean[ii] = float(fields[0]) sky_std[ii] = float(fields[1]) sky_mean_all = sky_mean.mean() sky_std_all = sky_std.mean() # Upper threshold above which we will ignore pixels when combining. hthreshold = sky_mean_all + 3.0 * sky_std_all ir.imdelete(output) ir.unlearn('imcombine') ir.imcombine.combine = 'median' ir.imcombine.reject = 'sigclip' ir.imcombine.mclip = 'yes' ir.imcombine.hsigma = 2 ir.imcombine.lsigma = 10 ir.imcombine.hthreshold = hthreshold ir.imcombine('@' + skylist, output)
def makesky_lp2(files, nite, wave):
    """Make L' skies by carefully treating the ROTPPOSN angle
    of the K-mirror. Uses only 2 skies combined (each adjacent pair in
    K-mirror angle is averaged into one sky).

    files -- integer frame numbers of the raw sky exposures
    nite -- night label; output goes into sky_<nite>/ under the cwd
    wave -- wavelength band label (not referenced in this routine)
    """
    # Start out in something like '06maylgs1/reduce/kp/'
    waveDir = os.getcwd() + '/'
    redDir = util.trimdir(os.path.abspath(waveDir + '../') + '/')
    rootDir = util.trimdir(os.path.abspath(redDir + '../') + '/')
    skyDir = waveDir + 'sky_' + nite + '/'
    rawDir = rootDir + 'raw/'
    util.mkdir(skyDir)

    raw = [rawDir + 'n' + str(i).zfill(4) for i in files]
    skies = [skyDir + 'n' + str(i).zfill(4) for i in files]

    _rawlis = skyDir + 'raw.lis'
    _nlis = skyDir + 'n.lis'
    _skyRot = skyDir + 'skyRot.txt'
    _txt = skyDir + 'rotpposn.txt'
    _out = skyDir + 'sky'
    _log = _out + '.log'
    util.rmall([_rawlis, _nlis, _skyRot, _txt, _out, _log])
    util.rmall([sky + '.fits' for sky in skies])

    open(_rawlis, 'w').write('\n'.join(raw) + '\n')
    open(_nlis, 'w').write('\n'.join(skies) + '\n')

    print 'makesky_lp: Getting raw files'
    ir.imcopy('@' + _rawlis, '@' + _nlis, verbose='no')
    # Record each file name with its K-mirror angle (ROTPPOSN).
    ir.hselect('@' + _nlis, "$I,ROTPPOSN", 'yes', Stdout=_skyRot)

    # Read in the list of files and rotation angles
    rotTab = asciidata.open(_skyRot)
    files = rotTab[0].tonumpy()
    angles = rotTab[1].tonumpy()

    # Fix angles to be between -180 and 180
    angles[angles > 180] -= 360.0
    angles[angles < -180] += 360.0

    sidx = np.argsort(angles)

    # Make sorted numarrays
    angles = angles[sidx]
    files = files[sidx]

    f_log = open(_log, 'w')
    f_txt = open(_txt, 'w')

    # Skip the first and last since we are going to
    # average every 3 files.
    print 'makesky_lp: Combining to make skies.'
    for i in range(1, len(sidx)):
        # Output sky is labeled with the average angle of the pair.
        angav = (angles[i] + angles[i - 1]) / 2.
        sky = 'sky%.1f' % (angav)
        skyFits = skyDir + sky + '.fits'
        util.rmall([skyFits])

        # Average 2 images
        # NOTE(review): `list` and `file` shadow Python builtins here.
        list = [file for file in files[i - 1:i + 1]]
        short = [file for file in files[i - 1:i + 1]]

        # Make short names
        for j in range(len(list)):
            tmp = (short[j]).rsplit('/', 1)
            short[j] = tmp[len(tmp) - 1]

        print '%s: %s %s' % (sky, short[0], short[1])
        f_log.write('%s: %s %s %6.1f %6.1f\n' %
                    (sky, short[0], short[1], angles[i - 1], angles[i]))

        ir.unlearn('imcombine')
        ir.imcombine.combine = 'average'
        ir.imcombine.reject = 'none'
        ir.imcombine.nlow = 1
        ir.imcombine.nhigh = 1
        ir.imcombine.logfile = ''
        ir.imcombine(list[1] + ',' + list[0], skyFits)

        # Record which frames went into this sky in its header.
        ir.hedit(skyFits, 'SKYCOMB', '%s: %s %s' % (sky, short[0], short[1]),
                 add='yes', show='no', verify='no')

        f_txt.write('%13s %8.3f\n' % (sky, angav))
    f_txt.close()
    f_log.close()