def correctimages(imagesre, zero='Zero', flat='nFlat'):
    '''Zero-correct and flat-field images with ccdred.ccdproc.

    Asks interactively for an optional trim section; trimming is enabled
    only when the user actually types one.
    '''
    frames = ', '.join(glob.glob(imagesre))
    trimsection = str(raw_input('Enter trim section (or Hit <Enter>): '))
    # trim only when the user supplied a section
    trimquery = trimsection != ''
    # Load packages
    iraf.imred()
    iraf.ccdred()
    # Unlearn settings
    iraf.ccdred.ccdproc.unlearn()
    iraf.ccdred.combine.unlearn()
    # Configure ccdproc: every correction off except trim/zero/flat
    proc = iraf.ccdred.ccdproc
    proc.ccdtype = ''
    proc.noproc = False
    proc.fixpix = False
    proc.overscan = False
    proc.darkcor = False
    proc.illumcor = False
    proc.fringecor = False
    proc.readcor = False
    proc.scancor = False
    proc.trim = trimquery
    proc.trimsec = trimsection
    proc.readaxis = 'line'
    proc.zerocor = True
    proc.zero = zero
    proc.flatcor = True
    proc.flat = flat
    proc(images=frames)
def masterbias(biasre, output='Zero', combine='median', reject='minmax', ccdtype='', rdnoise='rdnoise', gain='gain'):
    '''Combine zero-level (bias) frames with ccdred.zerocombine.

    Input:
    -------
    str biasre: regular expression to identify zero level images

    Output:
    -------
    file Zero.fits: combined zerolevel images
    '''
    frames = ', '.join(glob.glob(biasre))
    # load packages
    iraf.imred()
    iraf.ccdred()
    # unlearn settings for every task we touch
    for task in (iraf.imred, iraf.ccdred, iraf.ccdred.ccdproc,
                 iraf.ccdred.combine, iraf.ccdred.zerocombine,
                 iraf.ccdred.setinstrument):
        task.unlearn()
    # setup and run task
    zc = iraf.ccdred.zerocombine
    zc.output = output
    zc.combine = combine
    zc.reject = reject
    zc.ccdtype = ccdtype
    zc.rdnoise = rdnoise
    zc.gain = gain
    zc(input=frames)
def divflat(imagesre, flat='Flat'):
    '''Divide images by the (combined) flat with ccdred.ccdproc.'''
    frames = ', '.join(glob.glob(imagesre))
    # Load packages
    iraf.imred()
    iraf.ccdred()
    # Unlearn settings
    iraf.ccdred.ccdproc.unlearn()
    iraf.ccdred.combine.unlearn()
    # Configure ccdproc: only the flat-field correction is enabled
    proc = iraf.ccdred.ccdproc
    proc.ccdtype = ''
    proc.noproc = False
    proc.fixpix = False
    proc.overscan = False
    proc.darkcor = False
    proc.illumcor = False
    proc.fringecor = False
    proc.readcor = False
    proc.scancor = False
    proc.trim = False
    proc.trimsec = ''
    proc.readaxis = 'line'
    proc.zerocor = False
    proc.zero = ''
    proc.flatcor = True
    proc.flat = flat
    proc(images=frames)
def coroverbiastrim(lstfile):
    '''Two-pass ccdproc of the frames in *lstfile*:
    pass 1 subtracts overscan + master Zero (output prefix "bo"),
    pass 2 trims the result (output prefix "tbo").
    '''
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    x1, x2, y1, y2 = get_trim_sec()
    # parameters shared by both ccdproc passes
    common = dict(ccdtype='', max_cache=0, noproc=False,
                  fixpix=False, darkcor=False, flatcor=False,
                  illumcor=False, fringecor=False, readcor=False,
                  scancor=False, readaxis='line', fixfile='',
                  biassec='[5:45,%s:%s]' % (y1, y2),
                  trimsec='[%s:%s,%s:%s]' % (x1, x2, y1, y2),
                  zero='Zero', dark='', flat='', illum='', fringe='',
                  minreplace=1.0, scantype='shortscan', nscan=1,
                  interactive=False, function='chebyshev', order=1,
                  sample='*', naverage=1, niterate=1,
                  low_reject=3.0, high_reject=3.0, grow=1.0)
    # Pass 1: overscan + zero correction on extension [1]
    iraf.ccdproc(images='@' + lstfile + '//[1]',
                 output='%bo%bo%@' + lstfile,
                 overscan=True, trim=False, zerocor=True,
                 **common)
    # Pass 2: trim the overscan/bias-corrected frames
    iraf.ccdproc(images='%bo%bo%@' + lstfile,
                 output='%tbo%tbo%@' + lstfile,
                 overscan=False, trim=True, zerocor=False,
                 **common)
    iraf.flpr()
def wl_cal(filename, outname, cals): print 'run wavelength calibration...' if os.path.isfile(outname): print 'file %s is already exist' % outname else: iraf.imred() iraf.specred() print 'The calibration file is listed below:' for i in xrange(len(cals)): print i, cals[i], get_fits_objname(cals[i]) valget = 0 if len(cals) > 1: valget = raw_input('Which one are you want to use?:') while type(eval(valget)) != int: valget = raw_input('input is not a int type, please reinput:') valget = int(valget) iraf.refspectra(input = filename , references = cals[valget], apertures = '', refaps = '' , ignoreaps = True, select = 'interp', sort = '' , group = '', time = False, timewrap = 17.0 , override = False, confirm = True, assign = True , logfiles = 'STDOUT,logfile', verbose = False, answer = 'yes') print 'make file ' + outname + '...' iraf.dispcor(input = filename , output = outname, linearize = True, database = 'database' , table = '', w1 = 'INDEF', w2 = 'INDEF', dw = 'INDEF' , nw = 'INDEF', log = False, flux = True, blank = 0.0 , samedisp = False, ignoreaps = True , confirm = False, listonly = False, verbose = True , logfile = '') print 'splot %s' % outname iraf.splot(images = outname)
def subzero(imagesre, zero='Zero'):
    '''Subtract the master Zero frame from images with ccdred.ccdproc.'''
    frames = ', '.join(glob.glob(imagesre))
    # Load packages
    iraf.imred()
    iraf.ccdred()
    # Unlearn previous settings
    iraf.ccdred.ccdproc.unlearn()
    iraf.ccdred.combine.unlearn()
    # Configure ccdproc: only the zero-level correction is enabled
    proc = iraf.ccdred.ccdproc
    proc.ccdtype = ''
    proc.noproc = False
    proc.fixpix = False
    proc.overscan = False
    proc.darkcor = False
    proc.illumcor = False
    proc.fringecor = False
    proc.readcor = False
    proc.scancor = False
    proc.trim = False
    proc.trimsec = ''
    proc.readaxis = 'line'
    proc.zerocor = True
    proc.zero = zero
    proc.flatcor = False
    proc.flat = ''
    proc(images=frames)
def scalewavelenght(calspec):
    '''Interactively identify arc lines in *calspec* to build a
    wavelength solution (specred.identify).

    The line list defaults to linelists$thar.dat when the user just
    hits <Enter>.
    '''
    iraf.imred()
    iraf.specred()
    answer = str(raw_input('Enter file with list of lines (linelists$thar.dat) : '))
    linelist = answer if answer else 'linelists$thar.dat'
    iraf.specred.identify.coordlist = linelist
    iraf.specred.identify(images=calspec)
def combinebias(filename):
    '''Average-combine the overscan-corrected bias frames ("o" prefix)
    listed in *filename* into the master frame "Zero".
    '''
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    zero_pars = dict(output='Zero', combine='average', reject='minmax',
                     ccdtype='', process=False, delete=False,
                     clobber=False, scale='none', statsec='',
                     nlow=0, nhigh=1, nkeep=1, mclip=True,
                     lsigma=3.0, hsigma=3.0, rdnoise='rdnoise',
                     gain='gain', snoise=0.0, pclip=-0.5, blank=0.0)
    iraf.zerocombine(input='o//@' + filename, **zero_pars)
def main(): iraf.noao() iraf.imred() iraf.ccdred() print '=' * 20, 'Overscan', '=' * 20 correct_overscan('spec.lst') print '=' * 20, 'combine bias', '=' * 20 combine_bias('bias.lst') print '=' * 20, 'correct bias', '=' * 20 correct_bias('spec_no_bias.lst', 'bias_spec.fits') name = os.popen('ls object*.lst').readlines() name = [i.split()[0] for i in name] for i in name: ntrim_flat(i)
def flatresponse(input='Flat', output='nFlat'):
    '''Normalize the combined Flat with specred.response to correct
    illumination patterns.
    '''
    for pkg in (iraf.imred, iraf.ccdred, iraf.specred):
        pkg()
    iraf.ccdred.combine.unlearn()
    iraf.ccdred.ccdproc.unlearn()
    resp = iraf.specred.response
    resp.unlearn()
    # interactive low-order chebyshev fit of the flat's shape
    resp.interactive = True
    resp.function = 'chebyshev'
    resp.order = 1
    resp(calibration=input, normalization=input, response=output)
def ImportPackages():
    '''Load the IRAF packages needed for radial-velocity work and preset
    the keywpars / fxcor task parameters. Returns 0 on completion.
    '''
    for pkg in (iraf.noao, iraf.rv, iraf.imred, iraf.kpnoslit,
                iraf.ccdred, iraf.astutil):
        pkg(_doprint=0)
    # header keyword translations for the velocity tasks
    keyw_settings = [('ra', 'CAT-RA'), ('dec', 'CAT-DEC'), ('ut', 'UT'),
                     ('utmiddl', 'UT-M_E'), ('exptime', 'EXPTIME'),
                     ('epoch', 'CAT-EPOC'), ('date_ob', 'DATE-OBS'),
                     ('hjd', 'HJD'), ('mjd_obs', 'MJD-OBS'),
                     ('vobs', 'VOBS'), ('vrel', 'VREL'),
                     ('vhelio', 'VHELIO'), ('vlsr', 'VLSR'),
                     ('vsun', 'VSUN'), ('mode', 'ql')]
    for par, val in keyw_settings:
        iraf.keywpars.setParam(par, val)
    # cross-correlation (fxcor) defaults
    fxcor_settings = [('continu', 'both'), ('filter', 'none'),
                      ('rebin', 'smallest'), ('pixcorr', 'no'),
                      ('apodize', '0.2'), ('function', 'gaussian'),
                      ('width', 'INDEF'), ('height', '0.'),
                      ('peak', 'no'), ('minwidt', '3.'),
                      ('maxwidt', '21.'), ('weights', '1.'),
                      ('backgro', '0.'), ('window', 'INDEF'),
                      ('wincent', 'INDEF'), ('verbose', 'long'),
                      ('imupdat', 'no'), ('graphic', 'stdgraph'),
                      ('interac', 'yes'), ('autowri', 'yes'),
                      ('autodra', 'yes'), ('ccftype', 'image'),
                      ('observa', 'lapalma'), ('mode', 'ql')]
    for par, val in fxcor_settings:
        iraf.fxcor.setParam(par, val)
    return 0
def corhalogen(lstfile):
    '''Flat-field the trimmed/bias-corrected frames ("tbo" prefix) listed
    in *lstfile* with the normalized flat "Resp" (output prefix "f").
    '''
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    # only the flat-field correction is enabled in this pass
    flat_pars = dict(ccdtype='', max_cache=0, noproc=False,
                     fixpix=False, overscan=False, trim=False,
                     zerocor=False, darkcor=False, flatcor=True,
                     illumcor=False, fringecor=False, readcor=False,
                     scancor=False, readaxis='line', fixfile='',
                     biassec='', trimsec='',
                     zero='Zero', dark='', flat='Resp', illum='', fringe='',
                     minreplace=1.0, scantype='shortscan', nscan=1,
                     interactive=False, function='chebyshev', order=1,
                     sample='*', naverage=1, niterate=1,
                     low_reject=3.0, high_reject=3.0, grow=1.0)
    iraf.ccdproc(images='tbo@' + lstfile,
                 output='%ftbo%ftbo%@' + lstfile,
                 **flat_pars)
    iraf.flpr()
def coroverscan(filename):
    '''Overscan-correct extension [1] of the frames listed in *filename*
    (output prefix "o").
    '''
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    # only the overscan correction is enabled in this pass
    over_pars = dict(ccdtype='', max_cache=0, noproc=False,
                     fixpix=False, overscan=True, trim=False,
                     zerocor=False, darkcor=False, flatcor=False,
                     illumcor=False, fringecor=False, readcor=False,
                     scancor=False, readaxis='line', fixfile='',
                     biassec='[5:45,1:4612]', trimsec='', zero='',
                     dark='', flat='', illum='', fringe='',
                     minreplace=1.0, scantype='shortscan', nscan=1,
                     interactive=False, function='chebyshev', order=1,
                     sample='*', naverage=1, niterate=1,
                     low_reject=3.0, high_reject=3.0, grow=1.0)
    iraf.ccdproc(images='@' + filename + '//[1]',
                 output='%o%o%@' + filename,
                 **over_pars)
    iraf.flpr()
def runapall(imagesre, gain='gain', rdnoise='rdnoise'):
    '''Extract aperture spectra for the science images matching
    *imagesre* (specred.apall, one-d output format).
    '''
    frames = ', '.join(glob.glob(imagesre))
    # load packages
    iraf.imred()
    iraf.ccdred()
    iraf.specred()
    # unlearn previous settings
    for task in (iraf.ccdred.combine, iraf.ccdred.ccdproc,
                 iraf.specred.apall):
        task.unlearn()
    # setup and run task
    apall = iraf.specred.apall
    apall.format = 'onedspec'
    apall.readnoise = rdnoise
    apall.gain = gain
    apall(input=frames)
def findaperture(img, _interactive=False):
    '''Locate the spectral aperture center of *img* with specred.apfind.

    Returns the aperture center (float) parsed from the apfind database
    file, or 9999 when the database file cannot be read/parsed.
    '''
    # print "LOGX:: Entering `findaperture` method/function in %(__file__)s" %
    # globals()
    import re
    import string
    import os
    from pyraf import iraf
    import ntt
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.specred(_doprint=0)
    toforget = ['specred.apfind']
    for t in toforget:
        iraf.unlearn(t)
    iraf.specred.databas = 'database'
    iraf.specred.dispaxi = 2
    iraf.specred.apedit.thresho = 0
    dv = ntt.dvex()
    grism = ntt.util.readkey3(ntt.util.readhdr(img), 'grism')
    # apfind wants 'yes'/'no' strings, not booleans
    if _interactive:
        _interac = 'yes'
        _edit = 'yes'
    else:
        _interac = 'no'
        _edit = 'no'
    # remove any stale aperture database entry so apfind writes a fresh one
    if os.path.isfile('database/ap' + re.sub('.fits', '', img)):
        ntt.util.delete('database/ap' + re.sub('.fits', '', img))
    xx = iraf.specred.apfind(img, interac=_interac, find='yes', recenter='yes', edit=_edit, resize='no', aperture=1, Stdout=1, nfind=1, line=dv['line'][grism], nsum=50, mode='h')
    # parse the center coordinate back out of the database file apfind wrote;
    # the last matching "center" line wins
    try:
        for line in open('database/ap' + re.sub('.fits', '', img)):
            if "center" in line:
                center = float(string.split(line)[1])
    except:
        # best effort: any read/parse failure falls back to the sentinel
        center = 9999
    # NOTE(review): if the file opens but contains no "center" line, `center`
    # is never bound and the return below raises NameError — confirm whether
    # apfind always writes a center line.
    return center
def combine_flat(lstfile):
    '''Average-combine the halogen flats ("tbo" prefix) listed in
    *lstfile* into "Halogen", then derive the normalized response
    "Resp" with twodspec.longslit.response.
    '''
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    combine_pars = dict(output='Halogen', combine='average',
                        reject='crreject', ccdtype='', process=False,
                        subsets=False, delete=False, clobber=False,
                        scale='mode', statsec='', nlow=1, nhigh=1,
                        nkeep=1, mclip=True, lsigma=3.0, hsigma=3.0,
                        rdnoise='rdnoise', gain='gain', snoise=0.0,
                        pclip=-0.5, blank=1.0)
    iraf.flatcombine(input='tbo//@' + lstfile, **combine_pars)
    # extinction table and standard-star directory live next to this script
    script_path = os.path.split(os.path.realpath(__file__))[0]
    iraf.twodspec()
    iraf.longslit(dispaxis=2, nsum=1, observatory='observatory',
                  extinction=script_path + os.sep + 'LJextinct.dat',
                  caldir=script_path + os.sep + 'standarddir' + os.sep,
                  interp='poly5')
    response_pars = dict(normalization='Halogen', response='Resp',
                         interactive=True, threshold='INDEF', sample='*',
                         naverage=1, function='spline3', order=25,
                         low_reject=10.0, high_reject=10.0, niterate=1,
                         grow=0.0, graphics='stdgraph', cursor='')
    iraf.response(calibration='Halogen', **response_pars)
def masterflat(flatre, output='Flat', combine='median', reject='sigclip', scale='mode', rdnoise='rdnoise', gain='gain'):
    '''Combine flat-field frames with ccdred.flatcombine.

    Input:
    -------
    str: flatre - regular expression to flat files in the current directory

    Output:
    -------
    file: Flat.fits - combined flat field images
    '''
    frames = ', '.join(glob.glob(flatre))
    # load packages
    iraf.imred()
    iraf.ccdred()
    # unlearn settings for every task we touch
    for task in (iraf.imred, iraf.ccdred, iraf.ccdred.ccdproc,
                 iraf.ccdred.combine, iraf.ccdred.flatcombine,
                 iraf.ccdred.setinstrument):
        task.unlearn()
    # setup and run task
    fc = iraf.ccdred.flatcombine
    fc.output = output
    fc.combine = combine
    fc.reject = reject
    fc.ccdtype = ''
    fc.process = 'no'
    fc.subsets = 'yes'
    fc.scale = scale
    fc.rdnoise = rdnoise
    fc.gain = gain
    fc(input=frames)
import pyraf from pyraf import iraf import copy, os, shutil, glob, sys, string, re, math, operator, time import pyfits from types import * from mx.DateTime import * from iqpkg import * import ephem # Necessary packages iraf.images() iraf.immatch() iraf.imfilter() iraf.noao() iraf.imred() iraf.ccdred() iraf.digiphot() iraf.apphot() yes=iraf.yes no=iraf.no INDEF=iraf.INDEF hedit=iraf.hedit imgets=iraf.imgets imcombine=iraf.imcombine pyrafdir="python/pyraf/" pyrafdir_key='PYRAFPARS' if os.environ.has_key(pyrafdir_key):
def makeflat(lista):
    '''Build a bias-corrected sky flat from a list of 8 on/off frame pairs.

    *lista* is indexed positionally: [0],[7] and [1],[6] are flat-off frames
    and their masks; [2],[5] and [3],[4] are flat-on masks and frames.
    Combines each pair, removes the bias stripe with the Lidman technique,
    subtracts off from on, normalizes, and returns the output filename.
    If the output already exists the user is asked whether to redo it.
    '''
    # print "LOGX:: Entering `makeflat` method/function in %(__file__)s" %
    # globals()
    flat = ''
    import datetime
    import glob
    import os
    import ntt
    from ntt.util import readhdr, readkey3, delete, name_duplicate, updateheader, correctcard
    from pyraf import iraf
    iraf.images(_doprint=0)
    iraf.imutil(_doprint=0)
    iraf.imgeom(_doprint=0)
    # iraf.blkavg(_doprint=0)
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.generic(_doprint=0)
    toforget = ['imgeom.blkavg', 'imutil.imarith',
                'immatch.imcombine', 'noao.imred']
    for t in toforget:
        iraf.unlearn(t)
    import datetime
    # MJD offset anchored at 2012-01-01 = MJD 55927
    MJDtoday = 55927 + (datetime.date.today() - datetime.date(2012, 01, 01)).days
    _date = readkey3(readhdr(lista[0]), 'date-night')
    _filter = readkey3(readhdr(lista[0]), 'filter')
    output = name_duplicate(
        lista[3], 'flat_' + str(_date) + '_' + str(_filter) + '_' + str(MJDtoday), '')
    # ask before redoing an existing flat; default answer is 'n'
    if os.path.isfile(output):
        answ = raw_input('file already prooduced, do again [y/[n]] ? ')
        if not answ:
            answ = 'n'
    else:
        answ = 'y'
    if answ in ['yes', 'y', 'YES', 'Y', 'Yes']:
        delete("temp_off.fits,temp_off_mask.fits,temp_on_mask.fits,temp_on.fits")
        # combine each on/off pair (frames are mirrored around the list center)
        iraf.image.immatch.imcombine(
            lista[0] + ',' + lista[7], output="temp_off.fits")
        iraf.image.immatch.imcombine(
            lista[1] + ',' + lista[6], output="temp_off_mask.fits")
        iraf.image.immatch.imcombine(
            lista[2] + ',' + lista[5], output="temp_on_mask.fits")
        iraf.image.immatch.imcombine(
            lista[3] + ',' + lista[4], output="temp_on.fits")
        # create the bias correction for the flat-on according to the
        # Lidman technique0
        delete("temp_onA.fits,temp_onC.fits,temp_onB.fits,temp_onAC.fits,temp_onACB.fits,temp_onACB_2D.fits")
        delete("temp_on_bias.fits")
        # column averages of three strips: illuminated (A), masked at the
        # same columns (C) and masked reference (B)
        iraf.imgeom.blkavg(
            input="temp_on.fits[500:600,*]", output="temp_onA.fits",
            option="average", b1=101, b2=1)
        iraf.imgeom.blkavg(
            input="temp_on_mask.fits[500:600,*]", output="temp_onC.fits",
            option="average", b1=101, b2=1)
        iraf.imgeom.blkavg(
            input="temp_on_mask.fits[50:150,*]", output="temp_onB.fits",
            option="average", b1=101, b2=1)
        # bias profile = A - C + B, replicated back to full width (1024)
        iraf.imutil.imarith("temp_onA.fits", "-",
                            "temp_onC.fits", "temp_onAC.fits")
        iraf.imutil.imarith("temp_onAC.fits", "+",
                            "temp_onB.fits", "temp_onACB.fits")
        iraf.imgeom.blkrep(input="temp_onACB.fits",
                           output="temp_onACB_2D.fits", b1=1024, b2=1)
        iraf.imutil.imarith("temp_on.fits", "-",
                            "temp_onACB_2D.fits", "temp_on_bias.fits")
        # same as above for the flat-off
        delete("temp_offA.fits,temp_offC.fits,temp_offB.fits,temp_offAC.fits,temp_offACB.fits,temp_offACB_2D.fits")
        delete("temp_off_bias.fits")
        iraf.imgeom.blkavg(
            input="temp_off.fits[500:600,*]", output="temp_offA.fits",
            option="average", b1=101, b2=1)
        iraf.imgeom.blkavg(
            input="temp_off_mask.fits[500:600,*]", output="temp_offC.fits",
            option="average", b1=101, b2=1)
        iraf.imgeom.blkavg(
            input="temp_off_mask.fits[50:150,*]", output="temp_offB.fits",
            option="average", b1=101, b2=1)
        iraf.imutil.imarith("temp_offA.fits", "-",
                            "temp_offC.fits", "temp_offAC.fits")
        iraf.imutil.imarith("temp_offAC.fits", "+",
                            "temp_offB.fits", "temp_offACB.fits")
        iraf.imgeom.blkrep(input="temp_offACB.fits",
                           output="temp_offACB_2D.fits", b1=1024, b2=1)
        iraf.imutil.imarith("temp_off.fits", "-",
                            "temp_offACB_2D.fits", "temp_off_bias.fits")
        # create the corrected flat-field
        # output=name_duplicate("temp_on_bias.fits",'flat_'+str(_date)+'_'+str(_filter)+'_'+str(MJDtoday),'')
        output = name_duplicate(
            lista[3], 'flat_' + str(_date) + '_' + str(_filter) + '_' + str(MJDtoday), '')
        # print lista[0],'flat_'+str(_date)+'_'+str(_filter)+'_'+str(MJDtoday)
        delete(output)
        iraf.imutil.imarith("temp_on_bias.fits", "-",
                            "temp_off_bias.fits", output)
        iraf.noao.imred.generic.normalize(output)  # normalize the flat-field
        correctcard(output)
        delete("temp_on*.fits")  # delete the temporary images
        delete("temp_off*.fits")
        print 'flat -> ' + str(output)
    else:
        print 'skip redoing the flat'
    return output
def extractSpectra():
    """
    Extract 1D spectra using IRAF interactively

    Interpolate across the two arcs either side to
    get the most accurate wavelength solution

    Relies on module-level names: loginCl_location, g (glob), args,
    fits, SATURATION, RDNOISE, GAIN, lineList_location.

    TODO: Finish docstring
          Add method of using super arc for inital identify
    """
    # load IRAF from the location of the login.cl file
    here = os.getcwd()
    os.chdir(loginCl_location)
    from pyraf import iraf
    os.chdir(here)
    time.sleep(2)
    # make a list of the science images to be analysed
    templist = g.glob('i_s*')
    # import IRAF packages for spectroscopy
    iraf.imred(_doprint=0)
    iraf.kpnoslit(_doprint=0)
    # apall parameters
    iraf.apall.setParam('format', 'multispec')
    iraf.apall.setParam('interac', 'yes')
    iraf.apall.setParam('find', 'yes')
    iraf.apall.setParam('recen', 'yes')
    iraf.apall.setParam('resize', 'yes')
    iraf.apall.setParam('trace', 'yes')
    iraf.apall.setParam('fittrac', 'yes')
    iraf.apall.setParam('extract', 'yes')
    iraf.apall.setParam('extras', 'yes')
    iraf.apall.setParam('review', 'yes')
    iraf.apall.setParam('line', 'INDEF')
    iraf.apall.setParam('nsum', '12')
    iraf.apall.setParam('lower', '-6')
    iraf.apall.setParam('upper', '6')
    iraf.apall.setParam('b_funct', 'chebyshev')
    iraf.apall.setParam('b_order', '1')
    iraf.apall.setParam('b_sampl', '-25:-15,15:25')
    iraf.apall.setParam('b_naver', '-100')
    iraf.apall.setParam('b_niter', '0')
    iraf.apall.setParam('b_low_r', '3')
    iraf.apall.setParam('b_high', '3')
    iraf.apall.setParam('b_grow', '0')
    iraf.apall.setParam('width', '10')
    iraf.apall.setParam('radius', '10')
    iraf.apall.setParam('threshold', '0')
    iraf.apall.setParam('nfind', '1')
    iraf.apall.setParam('t_nsum', '10')
    iraf.apall.setParam('t_step', '10')
    iraf.apall.setParam('t_nlost', '3')
    iraf.apall.setParam('t_niter', '7')
    iraf.apall.setParam('t_funct', 'spline3')
    iraf.apall.setParam('t_order', '3')
    iraf.apall.setParam('backgro', 'fit')
    iraf.apall.setParam('skybox', '1')
    iraf.apall.setParam('weights', 'variance')
    iraf.apall.setParam('pfit', 'fit1d')
    iraf.apall.setParam('clean', 'yes')
    iraf.apall.setParam('saturat', SATURATION)
    iraf.apall.setParam('readnoi', RDNOISE)
    iraf.apall.setParam('gain', GAIN)
    iraf.apall.setParam('lsigma', '4.0')
    iraf.apall.setParam('usigma', '4.0')
    iraf.apall.setParam('nsubaps', '1')
    iraf.apall.saveParList(filename="apall.pars")
    # make reference arc for reidentify
    if '.' in args.refarc:
        args.refarc = args.refarc.split('.')[0]
    refarc = "a_s_{}_t.fits".format(args.refarc)
    refarc_out = "a_s_{}_t.ms.fits".format(args.refarc)
    # loop over all the the spectra
    for i in range(0, len(templist)):
        hdulist = fits.open(templist[i])
        prihdr = hdulist[0].header
        target_id = prihdr['CAT-NAME']
        # spectrum number parsed from the i_s_r<N>_... filename pattern
        spectrum_id = int(templist[i].split('_')[2].split('r')[1])
        if args.ds9:
            os.system('xpaset fuckingds9 fits < {}'.format(templist[i]))
        # extract the object spectrum
        print("[{}/{}] Extracting spectrum of {} from image {}".format(i+1, len(templist), target_id, templist[i]))
        print("[{}/{}] Check aperture and background. Change if required".format(i+1, len(templist)))
        print("[{}/{}] AP: m = mark aperture, d = delete aperture".format(i+1, len(templist)))
        print("[{}/{}] SKY: s = mark sky, t = delete sky, f = refit".format(i+1, len(templist)))
        print("[{}/{}] q = continue".format(i+1, len(templist)))
        iraf.apall(input=templist[i])
        print("Spectrum extracted!")
        # find the arcs either side of the object
        arclist = []
        arc1 = "a_s_r{0:d}_t.fits".format(spectrum_id-1)
        arc2 = "a_s_r{0:d}_t.fits".format(spectrum_id+1)
        arc1_out = "a_s_r{0:d}_t.ms.fits".format(spectrum_id-1)
        arc2_out = "a_s_r{0:d}_t.ms.fits".format(spectrum_id+1)
        # predict the arc names
        print("\nPredicting arcs names...")
        print("Arc1: {}".format(arc1))
        print("Arc2: {}".format(arc2))
        # setup a reference filename for the arc conditions in database
        reffile = templist[i].split('.fits')[0]
        # extract the arcs (non-interactively, reusing the object's aperture)
        print("\nExtracting arcs under the same conditions...")
        if os.path.exists(arc1):
            iraf.apall(input=arc1, reference=reffile, recente="no",
                       trace="no", backgro="no", interac="no")
            print("Arc1 {} extracted".format(arc1))
            arclist.append(arc1_out)
        else:
            print("\n\nArc1 {} FILE NOT FOUND\n\n".format(arc1))
        if os.path.exists(arc2):
            iraf.apall(input=arc2, reference=reffile, recente="no",
                       trace="no", backgro="no", interac="no")
            print("Arc2 {} extracted".format(arc2))
            arclist.append(arc2_out)
        else:
            print("\n\nArc2 {} FILE NOT FOUND\n\n".format(arc2))
        # get a list of the extracted arcs and objects
        spectrum_out = "i_s_r{0:d}_t.ms.fits".format(spectrum_id)
        if i == 0:
            # extract the master reference arc
            print("\nExtracting master arc {} under the same conditions...".format(refarc))
            iraf.apall(input=refarc, reference=reffile, recente="no",
                       trace="no", backgro="no", interac="no")
            print("Reference arc {} extracted".format(refarc))
            # identify the lines in it
            print("\nIdentify arc lines:")
            print("Enter the following in the splot window")
            print("\t:thres 500")
            print("\t:order 1, max = 3")
            print("\tfwidth 2")
            print("Select 3-5 arc lines from line atlas")
            print("Press 'm' to mark, then enter wavelength")
            print("Then press 'l' to automatically ID the other lines")
            print("Press 'f' to fit the dispersion correction")
            print("Use 'd' to remove bad points, 'f' to refit")
            print("'q' from fit, then 'q' from identify to continue\n")
            iraf.identify(images=refarc_out, coordlist=lineList_location)
        # use the refarc to ID all the subsequent arcs
        for arc in arclist:
            print("\nReidentifying arclines from {}".format(arc))
            iraf.reidentify(reference=refarc_out, images=arc)
        # add the refspec keywords to the image header for dispcor
        # refspec_factor tells IRAF how to interpolate the arcs
        refspec_factor = round((1./len(arclist)), 1)
        # NOTE(review): this inner loop reuses the outer loop variable `i`;
        # harmless in Python (the outer for reassigns each iteration) but
        # worth renaming when this is next touched.
        for i in range(0, len(arclist)):
            refspec = "{} {}".format(arclist[i].split(".fits")[0], refspec_factor)
            print("REFSPEC{}: {}".format(i+1, refspec))
            iraf.hedit(images=spectrum_out, fields="REFSPEC{}".format(i+1),
                       value=refspec, add="yes", verify="no", show="yes")
        print("Headers updated!\n")
        # apply the dispersion correction
        print("Applying the dispersion correction")
        iraf.dispcor(input=spectrum_out, output=spectrum_out,
                     lineari="yes", databas="database", table="")
        print("Correction applied!")
        # normalize the spectrum using continuum
        normspec_out = "{}n.ms.fits".format(spectrum_out.split('.ms')[0])
        iraf.continuum(input=spectrum_out, output=normspec_out,
                       logfile="logfile", interac="yes", functio="spline3",
                       order="5", niterat="10", markrej="yes")
        print("\n\n")
def create_masterguide(lfiles, out=None):
    '''
    Receives a list of guider images for the same object.
    It will remove the bias from it, combine them using the median,
    and compute the astrometry for the image.

    Parameters:
        lfiles -- list of guider frame paths (all for the same object)
        out    -- output filename; when None it is derived from the
                  OBJECT keyword of the last input frame
    '''
    curdir = os.getcwd()
    if len(lfiles) == 0:
        return
    else:
        os.chdir(os.path.abspath(os.path.dirname(lfiles[0])))
    fffile = "/tmp/l_guide"
    np.savetxt(fffile, np.array(lfiles), fmt="%s")
    # If the bias file exists in the directory, we use it, otherwise we pass
    bias_fast = "Bias_rc_fast.fits"
    debias = os.path.isfile(bias_fast)
    if debias:
        debiased = ["b_" + os.path.basename(f) for f in lfiles]
        bffile = "/tmp/lb_guider"
        np.savetxt(bffile, np.array(debiased), fmt="%s")
    if out is None:
        # BUG FIX: the original read the leaked comprehension variable
        # `img`, which was undefined (NameError) whenever no bias file
        # was present. Reference the last frame explicitly, which matches
        # the Python 2 leak behavior in the debias case.
        ref_img = lfiles[-1]
        obj = fitsutils.get_par(ref_img, "OBJECT")
        out = os.path.join(os.path.dirname(ref_img),
                           obj.replace(" ", "").replace(":", "") + ".fits")
    # Running IRAF
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    # Remove bias from the guider images
    if debias:
        try:
            iraf.imarith("@" + fffile, "-", bias_fast, "@" + bffile)
        except IrafError:
            # a transient IRAF failure sometimes succeeds on retry
            iraf.imarith("@" + fffile, "-", bias_fast, "@" + bffile)
    else:
        bffile = fffile
    # Median-combine the (de-biased) guider frames
    iraf.imcombine(input="@" + bffile,
                   output=out,
                   combine="median",
                   scale="mode", reject="sigclip",
                   lsigma=2., hsigma=2, gain=1.7, rdnoise=4.)
    iraf.imstat(out, fields="image,npix,mean,stddev,min,max,mode",
                Stdout="guide_stats")
    # st = np.genfromtxt("guide_stats", names=True, dtype=None)
    # Do some cleaning
    if debias:
        logger.info('Removing from lfiles')
        for f in debiased:
            if os.path.isfile(f):
                os.remove(f)
    if os.path.isfile(fffile):
        os.remove(fffile)
    if os.path.isfile(bffile):
        os.remove(bffile)
    solve_astrometry(out, overwrite=True)
    os.chdir(curdir)
def create_masterbias(biasdir=None, channel='rc'):
    '''
    Combines slow and fast readout mode biases for the specified channel.

    Scans *biasdir* for rc*fits frames whose IMGTYPE contains BIAS, splits
    them by ADCSPEED (2 = fast), median-combines each group, and copies the
    results into the reference-calibration folder. Existing master files
    are not recreated.
    '''
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    if (biasdir is None) or biasdir == "":
        biasdir = "."
    outs = "Bias_%s_slow.fits" % channel
    outf = "Bias_%s_fast.fits" % channel
    doslow = True
    dofast = True
    if os.path.isfile(os.path.join(biasdir, outs)):
        logger.warn("%s master Bias exists!" % outs)
        doslow = False
    if os.path.isfile(os.path.join(biasdir, outf)):
        # BUG FIX: this warning previously reported the slow-mode file
        # name (outs) even though the fast-mode file was found.
        logger.warn("%s master Bias exists!" % outf)
        dofast = False
    if doslow or dofast:
        logger.info("Starting the Master Bias creation!")
    else:
        return
    os.chdir(biasdir)
    lfastbias = []
    lslowbias = []
    # Select all files that are Bias with same instrument
    for f in glob.glob("rc*fits"):
        try:
            # original applied .upper() twice; once is enough
            if "BIAS" in fitsutils.get_par(f, "IMGTYPE").upper():
                if fitsutils.get_par(f, "ADCSPEED") == 2:
                    lfastbias.append(f)
                else:
                    lslowbias.append(f)
        except:
            # best effort: skip frames with missing/unreadable headers
            pass
    logger.info("Files for bias SLOW mode: %s" % lslowbias)
    logger.info("Files for bias FAST mode: %s" % lfastbias)
    if len(lfastbias) > 0 and dofast:
        bfile_fast = "lbias_fast_" + channel
        np.savetxt(bfile_fast, np.array(lfastbias), fmt="%s")
        if os.path.isfile("Bias_stats_fast"):
            os.remove("Bias_stats_fast")
        iraf.imstat("@" + bfile_fast, Stdout="Bias_stats_fast")
        st = np.genfromtxt("Bias_stats_fast", names=True, dtype=None)
        logger.info("%s" % st)
        iraf.imcombine(input="@" + bfile_fast,
                       output=outf,
                       combine="median",
                       scale="mode")
        os.remove(bfile_fast)
        # copy into the reference folder with current date
        newdir = os.path.join("../../refphot/",
                              os.path.basename(os.path.abspath(biasdir)))
        if not os.path.isdir(newdir):
            os.makedirs(newdir)
        shutil.copy(outf, os.path.join(newdir, os.path.basename(outf)))
    else:
        # TODO(review): the duplicate definition of this function passes
        # `outf` here instead of the literal "Bias" — confirm which name
        # copy_ref_calib actually expects.
        copy_ref_calib(biasdir, "Bias")
    if len(lslowbias) > 0 and doslow:
        bfile_slow = "lbias_slow_" + channel
        np.savetxt(bfile_slow, np.array(lslowbias), fmt="%s")
        if os.path.isfile("Bias_stats_slow"):
            os.remove("Bias_stats_slow")
        iraf.imstat("@" + bfile_slow, Stdout="Bias_stats_slow")
        st = np.genfromtxt("Bias_stats_slow", names=True, dtype=None)
        logger.info("%s" % st)
        iraf.imcombine(input="@" + bfile_slow,
                       output=outs,
                       combine="median",
                       scale="mode")
        os.remove(bfile_slow)
        # copy into the reference folder with current date
        newdir = os.path.join("../../refphot/",
                              os.path.basename(os.path.abspath(biasdir)))
        if not os.path.isdir(newdir):
            os.makedirs(newdir)
        shutil.copy(outs, os.path.join(newdir, os.path.basename(outs)))
    else:
        copy_ref_calib(biasdir, outs)
def create_masterbias(biasdir=None, channel='rc'):
    '''
    Combines slow and fast readout mode biases for the specified channel.

    Duplicate of the earlier definition in this file; kept with the same
    interface. Scans *biasdir* for rc*fits BIAS frames, splits them by
    ADCSPEED (2 = fast), median-combines each group, and copies the
    results into the reference-calibration folder.
    '''
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    if (biasdir is None) or biasdir == "":
        biasdir = "."
    outs = "Bias_%s_slow.fits" % channel
    outf = "Bias_%s_fast.fits" % channel
    doslow = True
    dofast = True
    if os.path.isfile(os.path.join(biasdir, outs)):
        logger.warn("%s master Bias exists!" % outs)
        doslow = False
    if os.path.isfile(os.path.join(biasdir, outf)):
        # BUG FIX: this warning previously reported the slow-mode file
        # name (outs) even though the fast-mode file was found.
        logger.warn("%s master Bias exists!" % outf)
        dofast = False
    if doslow or dofast:
        logger.info("Starting the Master Bias creation!")
    else:
        return
    os.chdir(biasdir)
    lfastbias = []
    lslowbias = []
    # Select all files that are Bias with same instrument
    for f in glob.glob("rc*fits"):
        try:
            if "BIAS" in fitsutils.get_par(f, "IMGTYPE").upper():
                if fitsutils.get_par(f, "ADCSPEED") == 2:
                    lfastbias.append(f)
                else:
                    lslowbias.append(f)
        except:
            # best effort: skip frames with missing/unreadable headers
            pass
    logger.info("Files for bias SLOW mode: %s" % lslowbias)
    logger.info("Files for bias FAST mode: %s" % lfastbias)
    if len(lfastbias) > 0 and dofast:
        bfile_fast = "lbias_fast_" + channel
        np.savetxt(bfile_fast, np.array(lfastbias), fmt="%s")
        if os.path.isfile("Bias_stats_fast"):
            os.remove("Bias_stats_fast")
        iraf.imstat("@" + bfile_fast, Stdout="Bias_stats_fast")
        st = np.genfromtxt("Bias_stats_fast", names=True, dtype=None)
        logger.info("%s" % st)
        iraf.imcombine(input="@" + bfile_fast,
                       output=outf,
                       combine="median",
                       scale="mode")
        os.remove(bfile_fast)
        # copy into the reference folder with current date
        newdir = os.path.join("../../refphot/",
                              os.path.basename(os.path.abspath(biasdir)))
        if not os.path.isdir(newdir):
            os.makedirs(newdir)
        shutil.copy(outf, os.path.join(newdir, os.path.basename(outf)))
    else:
        copy_ref_calib(biasdir, outf)
    if len(lslowbias) > 0 and doslow:
        bfile_slow = "lbias_slow_" + channel
        np.savetxt(bfile_slow, np.array(lslowbias), fmt="%s")
        if os.path.isfile("Bias_stats_slow"):
            os.remove("Bias_stats_slow")
        iraf.imstat("@" + bfile_slow, Stdout="Bias_stats_slow")
        st = np.genfromtxt("Bias_stats_slow", names=True, dtype=None)
        logger.info("%s" % st)
        iraf.imcombine(input="@" + bfile_slow,
                       output=outs,
                       combine="median",
                       scale="mode")
        os.remove(bfile_slow)
        # copy into the reference folder with current date
        newdir = os.path.join("../../refphot/",
                              os.path.basename(os.path.abspath(biasdir)))
        if not os.path.isdir(newdir):
            os.makedirs(newdir)
        shutil.copy(outs, os.path.join(newdir, os.path.basename(outs)))
    else:
        copy_ref_calib(biasdir, outs)
def sensfunction(standardfile, _function, _order, _interactive):
    # Compute a spectrophotometric sensitivity function from a reduced
    # standard-star spectrum using IRAF specred.standard + specred.sensfunc.
    # Returns the name of the sensitivity FITS file written.
    # NOTE: Python-2-only code (leading-zero integer literals, raw_input).
    # print "LOGX:: Entering `sensfunction` method/function in %(__file__)s" %
    # globals()
    import re
    import os
    import sys
    import ntt
    import datetime
    try:
        import pyfits  # added later
    except:
        # Fall back to astropy when standalone pyfits is unavailable.
        from astropy.io import fits as pyfits
    from pyraf import iraf
    import numpy as np

    # MJD of 2012-01-01 is 55927; offset by days elapsed since then.
    MJDtoday = 55927 + (datetime.date.today() - datetime.date(2012, 01, 01)).days
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.specred(_doprint=0)
    # Reset these tasks to their defaults before configuring them.
    toforget = ['specred.scopy', 'specred.sensfunc', 'specred.standard']
    for t in toforget:
        iraf.unlearn(t)
    iraf.specred.scopy.format = 'multispec'
    iraf.specred.verbose = 'no'
    hdrs = ntt.util.readhdr(standardfile)
    try:
        # Output name encodes night, grism, filter, standard name and MJD.
        _outputsens = 'sens_' + str(ntt.util.readkey3(hdrs, 'date-night')) + '_' + \
            str(ntt.util.readkey3(hdrs, 'grism')) + '_' + str(ntt.util.readkey3(hdrs, 'filter')) + '_' + \
            re.sub('.dat', '', ntt.util.readkey3(
                hdrs, 'stdname')) + '_' + str(MJDtoday)
    except:
        sys.exit('Error: missing header -stdname- in standard ' +
                 str(standardfile) + ' ')

    _outputsens = ntt.util.name_duplicate(standardfile, _outputsens, '')
    if os.path.isfile(_outputsens):
        # Existing output: delete silently in batch mode, ask otherwise.
        if _interactive.lower() != 'yes':
            ntt.util.delete(_outputsens)
        else:
            answ = raw_input(
                'sensitivity function already computed, do you want to do it again [[y]/n] ? ')
            if not answ:
                answ = 'y'
            if answ.lower() in ['y', 'yes']:
                ntt.util.delete(_outputsens)

    if not os.path.isfile(_outputsens):
        # Make the package directory visible to IRAF as "direc$".
        iraf.set(direc=ntt.__path__[0] + '/')
        _caldir = 'direc$standard/MAB/'
        _extinctdir = 'direc$standard/extinction/'
        _observatory = 'lasilla'
        _extinction = 'lasilla2.txt'
        # Reference star calibration files are prefixed with 'm'.
        refstar = 'm' + \
            re.sub('.dat', '', pyfits.open(standardfile)
                   [0].header.get('stdname'))
        _airmass = ntt.util.readkey3(hdrs, 'airmass')
        _exptime = ntt.util.readkey3(hdrs, 'exptime')
        _outputstd = 'std_' + str(ntt.util.readkey3(hdrs, 'grism')) + '_' + \
            str(ntt.util.readkey3(hdrs, 'filter')) + '.fits'
        ntt.util.delete(_outputstd)
        ntt.util.delete(_outputsens)
        # Measure the standard-star fluxes, then fit the sensitivity curve.
        iraf.specred.standard(input=standardfile, output=_outputstd,
                              extinct=_extinctdir + _extinction,
                              caldir=_caldir, observa=_observatory,
                              star_nam=refstar, airmass=_airmass,
                              exptime=_exptime, interac=_interactive)
        iraf.specred.sensfunc(standard=_outputstd, sensitiv=_outputsens,
                              extinct=_extinctdir + _extinction,
                              ignorea='yes', observa=_observatory,
                              functio=_function, order=_order,
                              interac=_interactive)
        # Re-write the sensitivity image with the header of the input
        # standard frame (keeps the original observation metadata).
        data, hdr = pyfits.getdata(standardfile, 0, header=True)  # added later
        data1, hdr1 = pyfits.getdata(
            _outputsens, 0, header=True)  # added later
        ntt.util.delete(_outputsens)  # added later
        pyfits.writeto(_outputsens, np.float32(data1), hdr)  # added later
    return _outputsens
def efoscreduction(imglist, _interactive, _doflat, _dobias, listflat, listbias,
                   _dobadpixel, badpixelmask, fringingmask, _archive, typefile,
                   filenameobjects, _system, _cosmic, _verbose=False, method='iraf'):
    # Main EFOSC imaging reduction driver: classifies the input frames
    # (bias / sky flat / dome flat / science), builds master bias and master
    # flat frames, then bias-subtracts, flat-fields, bad-pixel-corrects,
    # cosmic-ray-cleans, astrometrically calibrates and zero-points each
    # science frame.  Returns nothing explicitly; results are written to
    # disk and logged in ./logNTT.txt.
    # NOTE: Python-2-only code (print statements, raw_input,
    # `except Exception, e`, leading-zero integer literals).
    # NOTE(review): structure below was reconstructed from a
    # whitespace-mangled source; ambiguous attachments are flagged inline.
    # print "LOGX:: Entering `efoscreduction` method/function in %(__file__)s"
    # % globals()
    import ntt
    from ntt.efoscphotredudef import searchbias
    from ntt.util import delete, readhdr, readkey3, display_image, searchflat, rangedata, correctcard
    # NOTE: this shadows the builtins `min` and `abs` inside this function.
    from numpy import argmin, min, abs, sqrt
    import string
    import os
    import re
    import math
    import sys
    from pyraf import iraf
    # ## Call and set parameters for useful iraf tasks
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    iraf.proto(_doprint=0)
    toforget = ['ccdproc', 'zerocombine',
                'flatcombine', 'imreplace', 'proto.fixpix']
    for t in toforget:
        iraf.unlearn(t)
    # All corrections are off by default; each step enables what it needs.
    iraf.ccdproc.darkcor = 'no'
    iraf.ccdproc.fixpix = 'no'
    iraf.ccdproc.flatcor = 'no'
    iraf.ccdproc.zerocor = 'no'
    iraf.ccdproc.overscan = 'no'
    iraf.ccdproc.ccdtype = ''
    iraf.ccdproc.biassec = ''
    iraf.ccdred.instrument = "/dev/null"
    if _verbose:
        iraf.ccdred.verbose = 'yes'
    else:
        iraf.ccdred.verbose = 'no'
    import datetime
    import time
    # starttime=time.time()
    now = datetime.datetime.now()
    datenow = now.strftime('20%y%m%d%H%M')
    # MJD of 2012-01-01 is 55927; offset by days elapsed since then.
    MJDtoday = 55927 + (datetime.date.today() -
                        datetime.date(2012, 01, 01)).days
    outputfile = []
    reduceddata = rangedata(imglist)
    img = re.sub('\n', '', imglist[0])
    hdr = readhdr(img)
    _gain = readkey3(hdr, 'gain')
    _rdnoise = readkey3(hdr, 'ron')
    _instrume = readkey3(hdr, 'instrume')
    _trimsec = '[3:1010,1:1015]'
    # Classification containers: keyed by night (bias) or filter (flats,
    # science).  flatlist1 = sky flats, flatlist2 = dome flats.
    biaslist = {}
    flatlist1 = {}
    flatlist2 = {}
    objectlist = {}
    filterlist1 = []
    filterlist2 = []
    for img in imglist:
        _type = ''
        img = re.sub('\n', '', img)
        hdr = readhdr(img)
        _naxis1 = readkey3(hdr, 'NAXIS1')
        _naxis2 = readkey3(hdr, 'NAXIS2')
        # Only full-frame 1030x1030 fastL EFOSC images are reduced here.
        if _naxis1 != 1030 or _naxis2 != 1030:
            ntt.util.writeinthelog(
                'image ' + str(img) + ' different dimension =\n', './logNTT.txt')
            _type = 'not good'
        if not _type and readkey3(hdr, 'speed') != 'fastL':
            _type = 'not good'
        if not _type and readkey3(hdr, 'instrume') != 'efosc':
            _type = 'not good'
        _imagetype = readkey3(hdr, 'tech')
        if not _type and _imagetype == 'SPECTRUM':
            _type = 'spectroscopic data'
        if not _type:
            _exptime = readkey3(hdr, 'exptime')
            _date = readkey3(hdr, 'date-night')
            _filter = readkey3(hdr, 'filter')
            # Zero-second exposures are bias frames.
            if float(_exptime) == 0.0:
                if _date not in biaslist:
                    biaslist[_date] = []
                biaslist[_date].append(img)
                _type = 'bias'
            if not _type:
                _object = readkey3(hdr, 'object')
                # Broad-band imaging filters only.
                if _filter.lower() in ['g782', 'r784', 'z623', 'u640',
                                       'b639', 'v641', 'r642', 'i705'] and _imagetype == 'IMAGE':
                    if 'sky,flat' in _object.lower():
                        _type = 'flat'
                    elif 'dome' in _object.lower() or 'flat' in _object.lower():
                        _type = 'flat dome'
                if _type == 'flat':
                    if _filter not in filterlist1:
                        filterlist1.append(_filter)
                        flatlist1[_filter] = []
                    flatlist1[_filter].append(img)
                if _type == 'flat dome':
                    if _filter not in filterlist2:
                        filterlist2.append(_filter)
                        flatlist2[_filter] = []
                    flatlist2[_filter].append(img)
            if not _type:
                _catg = readkey3(hdr, 'catg')
                if 'science' in _catg.lower() or 'acquisition' in _catg.lower():
                    _type = 'object'
                    if _filter not in objectlist:
                        objectlist[_filter] = []
                    objectlist[_filter].append(img)
                    if 'acquisition' in _catg.lower():
                        # Best effort: fix header cards and object coords.
                        try:
                            correctcard(img)
                            _ra1, _dec1, _name = ntt.util.correctobject(
                                img, 'standard_efosc_mab.txt')
                            _ra1, _dec1, _name = ntt.util.correctobject(
                                img, filenameobjects)
                        except:
                            pass
                elif 'focus' in _object.lower():
                    _type = 'not good'
            if not _type:
                # Unclassified frame: ask the operator.
                print '\n### warning: object not recognized '
                _object = readkey3(hdr, 'object')
                print img, _object, _imagetype
                answ = raw_input(
                    'what is it: bias [1], flat [3], object[4], test [5] ? [5] ')
                if not answ:
                    answ = '5'
                if answ == '1':
                    if _date not in biaslist:
                        # NOTE(review): a tuple literal here, unlike the []
                        # used elsewhere — .append would fail on a new date.
                        biaslist[_date] = ()
                    biaslist[_date].append(img)
                elif answ == '4':
                    if _filter not in objectlist:
                        objectlist[_filter] = []
                    objectlist[_filter].append(img)
                elif answ == '3':
                    tt = raw_input('dome or sky [d/[s]] ? ')
                    if tt == 's':
                        _type = 'flat'
                        _filter = readkey3(hdr, 'filter')
                        if _filter not in filterlist1:
                            filterlist1.append(_filter)
                            flatlist1[_filter] = []
                        flatlist1[_filter].append(img)
                    elif tt == 'd':
                        _type = 'flat dome'
                        _filter = readkey3(hdr, 'filter')
                        if _filter not in filterlist2:
                            filterlist2.append(_filter)
                            flatlist2[_filter] = []
                        flatlist2[_filter].append(img)
                elif answ == '5':
                    _type = 'not good'
    filterlist = list(set(filterlist1 + filterlist2))
    if _verbose:
        print filterlist1
        print filterlist2
        print flatlist1
        print flatlist2
    # Pick, per filter, sky flats when >= 3 exist, else dome flats.
    flatlist = {}
    for _filt in filterlist:
        if _filt not in flatlist1.keys():
            if _filt in flatlist2.keys():
                if len(flatlist2[_filt]) >= 3:
                    flatlist[_filt] = flatlist2[_filt]
        elif len(flatlist1[_filt]) < 3:
            if _filt in flatlist2.keys():
                if len(flatlist2[_filt]) >= 3:
                    flatlist[_filt] = flatlist2[_filt]
        elif _filt in flatlist1.keys():
            if len(flatlist1[_filt]) >= 3:
                flatlist[_filt] = flatlist1[_filt]
    listaout = []
    if _verbose:
        print '\n### flat ', str(flatlist), '\n'
        print '\n### bias ', str(biaslist), '\n'
        print '\n### object ', str(objectlist), '\n'
    ###### masterbias #################
    if _dobias:
        if not _archive:
            if listbias:
                masterbiaslist = listbias
            else:
                masterbiaslist = []
                if biaslist:
                    for _date in biaslist:
                        print '\n do bias ' + str(_date) + '\n'
                        biaslist[_date] = rejectbias(
                            biaslist[_date], False, 10)
                        if len(biaslist[_date]) >= 3:
                            masterbiasfile = 'bias_' + \
                                str(_date) + '_' + str(MJDtoday) + '.fits'
                            delete(masterbiasfile)
                            # 'biaslist' holds the raw names, 'obiaslist'
                            # the trimmed 'o'-prefixed outputs.
                            f = open('biaslist', 'w')
                            h = open('obiaslist', 'w')
                            for img in biaslist[_date]:
                                f.write(img + '\n')
                                h.write('o' + img + '\n')
                                delete('o' + img)
                            f.close()
                            h.close()
                            try:
                                print 'processing bias .....'
                                # Trim only, then median-combine with
                                # ccdclip rejection.
                                iraf.ccdproc('@biaslist', output='@obiaslist',
                                             overscan="no", trim="yes",
                                             zerocor='no', fixpix='no',
                                             ccdtype='', flatcor='no',
                                             darkcor='no', biassec='',
                                             trimsec=str(_trimsec),
                                             readaxi='column', Stdout=1)
                                iraf.zerocombine('@obiaslist',
                                                 output=masterbiasfile,
                                                 combine='median',
                                                 reject='ccdclip', ccdtype='',
                                                 process='no',
                                                 rdnoise=_rdnoise,
                                                 gain=_gain, Stdout=1)
                                correctcard(masterbiasfile)
                                # Record provenance of every input frame.
                                num = 0
                                for img in biaslist[_date]:
                                    num = num + 1
                                    ntt.util.updateheader(masterbiasfile, 0, {
                                        'PROV' + str(num): [readkey3(readhdr(img), 'ARCFILE'), 'Originating file']})
                                    ntt.util.updateheader(masterbiasfile, 0, {
                                        'TRACE' + str(num): [readkey3(readhdr(img), 'ARCFILE'), 'Originating file']})
                                    delete('o' + img)
                                ntt.util.updateheader(masterbiasfile, 0, {
                                    'M_EPOCH': [False, 'TRUE if resulting from multiple epochs']})
                                ntt.util.updateheader(masterbiasfile, 0, {
                                    'SINGLEXP': [False, 'TRUE if resulting from single exposure']})
                                ntt.util.updateheader(masterbiasfile, 0, {
                                    'FILETYPE': [11201, 'bias']})
                                masterbiaslist.append(masterbiasfile)
                                if masterbiasfile not in outputfile:
                                    outputfile.append(masterbiasfile)
                            except:
                                ntt.util.writeinthelog(
                                    'Warning ' + str(biaslist[_date]) + ' problem with this list of bias \n', './logNTT.txt')
                            if masterbiasfile and _interactive:
                                aa, bb, cc = display_image(
                                    masterbiasfile, 1, '', '', False)
                                answ = raw_input(
                                    'is the masterbias ok [[y]/n] ?')
                                if not answ:
                                    answ = 'y'
                                if answ in ['n', 'no']:
                                    sys.exit(
                                        'remove bad bias from input list and restart')
    else:
        # NOTE(review): with _dobias True and _archive True, masterbiaslist
        # is never assigned — TODO confirm intended else-attachment.
        masterbiaslist = []
    ########## masterflat #########################
    if _doflat:
        if not _archive:
            if listflat:
                masterflatlist = listflat
            else:
                masterflatlist = []
                if flatlist:
                    for _filter in flatlist:
                        print '\n do flat ' + str(_filter) + '\n'
                        flatlist[_filter] = rejectflat(
                            flatlist[_filter], False)
                        if len(flatlist[_filter]) >= 3:
                            _date = readkey3(
                                readhdr(flatlist[_filter][0]), 'date-night')
                            masterflat = 'flat_' + \
                                str(_date) + '_' + str(_filter) + \
                                '_' + str(MJDtoday) + '.fits'
                            listaflat = 'flatlist_' + \
                                str(_date) + '_' + str(_filter)
                            # Find a master bias matching these flats.
                            _bias = ''
                            if masterbiaslist:
                                _bias = searchbias(flatlist[_filter][
                                    0], masterbiaslist)[0]
                            if not _bias:
                                _bias = searchbias(flatlist[_filter][0], '')[0]
                            if _bias:
                                if _bias[0] == '/':
                                    # Archive bias: copy it locally first.
                                    os.system('cp ' + _bias + ' .')
                                    _bias = string.split(_bias, '/')[-1]
                                    _zerocor = 'yes'
                                else:
                                    _zerocor = 'yes'
                            else:
                                _zerocor = 'no'
                            # Loop until the operator accepts the masterflat.
                            answ0 = 'n'
                            while answ0 != 'y':
                                f = open(listaflat, 'w')
                                h = open('o' + listaflat, 'w')
                                for img in flatlist[_filter]:
                                    f.write(img + '\n')
                                    h.write('o' + img + '\n')
                                    delete('o' + img)
                                f.close()
                                h.close()
                                try:
                                    print 'processing flat .....'
                                    # Trim + bias-subtract, then combine
                                    # with avsigclip rejection.
                                    iraf.ccdproc('@' + listaflat,
                                                 output='@o' + listaflat,
                                                 overscan='no', trim='yes',
                                                 darkcor='no', fixpix='no',
                                                 zerocor=_zerocor,
                                                 flatcor='no',
                                                 trimsec=str(_trimsec),
                                                 biassec='', zero=_bias,
                                                 readaxi='column',
                                                 ccdtype='', Stdout=1)
                                    delete(masterflat)
                                    iraf.flatcombine('@o' + listaflat,
                                                     output=masterflat,
                                                     combine='average',
                                                     reject='avsigclip',
                                                     ccdtype='', process='no',
                                                     rdnoise=_rdnoise,
                                                     gain=_gain,
                                                     statsec='[100:800,100:800]',
                                                     lsigma=3, hsigma=2,
                                                     Stdout=1)
                                    masterflatlist.append(masterflat)
                                    correctcard(masterflat)
                                    # Record provenance of inputs.
                                    num = 0
                                    for img in flatlist[_filter]:
                                        num = num + 1
                                        ntt.util.updateheader(masterflat, 0, {
                                            'PROV' + str(num): [readkey3(readhdr(img), 'ARCFILE'), 'Originating file']})
                                        ntt.util.updateheader(masterflat, 0, {
                                            'TRACE' + str(num): [readkey3(readhdr(img), 'ARCFILE'), 'Originating file']})
                                        delete('o' + img)
                                    ntt.util.updateheader(
                                        masterflat, 0, {'ZEROCOR': [_bias, '']})
                                    ntt.util.updateheader(masterflat, 0, {
                                        'M_EPOCH': [False, 'TRUE if resulting from multiple epochs']})
                                    ntt.util.updateheader(masterflat, 0, {
                                        'SINGLEXP': [False, 'TRUE if resulting from single exposure']})
                                    ntt.util.updateheader(
                                        masterflat, 0, {'FILETYPE': [11202, 'flat field']})
                                    if masterflat not in outputfile:
                                        outputfile.append(masterflat)
                                except:
                                    ntt.util.writeinthelog(
                                        'Warning ' + str(flatlist[
                                            _filter]) + ' problem with this list of flat \n', './logNTT.txt')
                                aa, bb, cc = display_image(
                                    masterflat, 1, '', '', False)
                                if masterflat and _interactive:
                                    answ = raw_input(
                                        'is the masterflat ok [[y]/n] ?')
                                    if not answ:
                                        answ = 'y'
                                    if answ.lower() in ['n', 'no']:
                                        answ1 = raw_input(
                                            'try again [[y]/n] ?')
                                        if not answ1:
                                            answ1 = 'y'
                                        if answ1.lower() in ['y', 'yes']:
                                            flatlist[_filter] = ntt.efoscphotredudef.rejectflat(
                                                flatlist[_filter], True)
                                        else:
                                            sys.exit(
                                                'error: problem with flat .... exit')
                                    else:
                                        answ0 = 'y'
                                else:
                                    answ0 = 'y'
    else:
        # NOTE(review): with _doflat True and _archive True, masterflatlist
        # is never assigned — TODO confirm intended else-attachment.
        masterflatlist = []
    ##########################################################################
    # Empty lists become '' so later truth-tests skip the correction.
    if len(masterbiaslist) == 0:
        masterbiaslist = ''
    if len(masterflatlist) == 0:
        masterflatlist = ''
    ######################################
    if _verbose:
        print ''
        print '#############################'
        print masterflatlist
        print masterbiaslist
        print '#############################'
        print ''
    if masterflatlist:
        listaout = listaout + masterflatlist
    if masterbiaslist:
        listaout = listaout + masterbiaslist
    # 'calib' runs produce only calibrations: skip the science frames.
    if typefile == 'calib':
        objectlist = {}
    for _filter in objectlist:
        for img in objectlist[_filter]:
            hdr = readhdr(img)
            print '\n#####################################################################\n'
            # Sanitize the object name for use in a file name.
            _object = readkey3(hdr, 'object')
            _object = re.sub(' ', '', _object)
            _object = re.sub('/', '_', _object)
            _object = re.sub('\n', '', _object)
            _exptime = readkey3(hdr, 'exptime')
            _date = readkey3(hdr, 'date-night')
            nameout = ntt.util.name_duplicate(
                img, str(_object) + '_' + str(_date) + '_' + str(_filter) + '_' + str(MJDtoday), '')
            # Select the calibrations to apply to this frame.
            _bias = ''
            if _dobias:
                if masterbiaslist:
                    _bias = searchbias(img, masterbiaslist)[0]
                if not _bias:
                    _bias = searchbias(img, '')[0]
            _flat = ''
            if _doflat:
                if masterflatlist:
                    _flat = searchflat(img, masterflatlist)[0]
                if not _flat:
                    _flat = searchflat(img, '')[0]
            if _bias:  # bias ###
                if _bias[0] == '/':
                    os.system('cp ' + _bias + ' .')
                    _bias = string.split(_bias, '/')[-1]
                    _zerocor = 'yes'
                else:
                    # NOTE(review): a local (non-archive) bias yields
                    # zerocor='no' here — TODO confirm this is intended.
                    _zerocor = 'no'
            else:
                _zerocor = 'no'
            if _flat:  # flat ###
                if _flat[0] == '/':
                    os.system('cp ' + _flat + ' .')
                    _flat = string.split(_flat, '/')[-1]
                _flatcor = 'yes'
            else:
                _flatcor = 'no'
            sss = str(_object) + '_' + str(_date) + '_' + str(_filter)
            print '### input', img, sss
            print '### bias ', _zerocor, _bias
            print '### flat ', _flatcor, _flat
            print '### name ', nameout
            delete(nameout)
            try:
                # Pass 1: trim + bias subtraction into nameout.
                iraf.ccdproc(img, output=nameout, overscan="no", trim="yes",
                             zerocor=_zerocor, flatcor='no', darkcor='no',
                             trimsec=str(_trimsec), zero=_bias, biassec='',
                             readaxi='column', Stdout=1)
                # Pass 2: flat-field in place; on failure, clip near-zero
                # flat pixels to 0.01 and retry once.
                try:
                    iraf.ccdproc(nameout, output='', overscan="no",
                                 trim="no", zerocor='no', flatcor=_flatcor,
                                 darkcor='no', flat=_flat, readaxi='column',
                                 ccdtype='', Stdout=1)
                except:
                    iraf.imrepla(images=_flat, value=0.01, lower='INDEF',
                                 upper=0.01, radius=0)
                    iraf.ccdproc(nameout, output='', overscan="no",
                                 trim="no", zerocor='no', flatcor=_flatcor,
                                 darkcor='no', flat=_flat, readaxi='column',
                                 ccdtype='', Stdout=1)
                correctcard(nameout)
                ntt.util.updateheader(nameout, 0,
                                      {'FILTER': [readkey3(readhdr(nameout), 'filter'), 'Filter name'],
                                       'SINGLEXP': [True, 'TRUE if resulting from single exposure'],
                                       'M_EPOCH': [False, 'TRUE if resulting from multiple epochs'],
                                       'FLATCOR': [_flat, ''],
                                       'ZEROCOR': [_bias, ''],
                                       'FILETYPE': [12204, 'pre-reduced image'],
                                       'PROV1': [readkey3(readhdr(nameout), 'ARCFILE'), 'Originating file'],
                                       'NCOMBINE': [1, 'Number of raw science data'],
                                       'TRACE1': [readkey3(readhdr(nameout), 'ARCFILE'), 'Originating file']})
                ntt.util.airmass(nameout)  # phase 3 definitions
                ntt.util.writeinthelog('\n', './logNTT.txt')
                ntt.util.writeinthelog(
                    'image= ' + str(img) + ' output= ' + str(nameout) + '\n', './logNTT.txt')
                ntt.util.writeinthelog(
                    'bias= ' + str(_bias) + ', flat= ' + str(_flat) + '\n', './logNTT.txt')
                ntt.util.writeinthelog('\n', './logNTT.txt')
                if nameout not in outputfile:
                    outputfile.append(nameout)
            except:
                ntt.util.writeinthelog(
                    'image ' + str(img) + ' probably corrupted\n', './logNTT.txt')
            if _dobadpixel:
                # Without an explicit mask, copy the instrument default.
                if not badpixelmask:
                    badpixelmask = 'bad_pixel_mask.fits'
                    delete(badpixelmask)
                    os.system('cp ' + ntt.__path__[0] + '/archive/' + str(
                        _instrume) + '/badpixels/badpixel.fits ' + badpixelmask)
                iraf.proto.fixpix(images=nameout, masks=badpixelmask,
                                  linterp='INDEF', cinterp='INDEF',
                                  verbose='no')
                ntt.util.updateheader(
                    nameout, 0, {'FIXPIX': [badpixelmask, '']})
                ntt.util.writeinthelog('image ' + str(nameout) + ' bad pixel corrected with ' +
                                       badpixelmask + '\n', './logNTT.txt')
                print '\n### bad pixel mask correction ..... done'
            else:
                ntt.util.writeinthelog(
                    'image ' + str(nameout) + ' bad pixel NOT corrected\n', './logNTT.txt')
            if _cosmic:
                try:
                    print '\n### cosmic ..... '
                    # Laplacian cosmic-ray rejection (LACosmic).
                    ntt.cosmics.lacos_im(nameout, _output='', gain=_gain,
                                         readn=_rdnoise, xorder=9, yorder=9,
                                         sigclip=4.5, sigfrac=0.5, objlim=1,
                                         skyval=0, niter=0, verbose=True,
                                         interactive=False)
                    ntt.util.updateheader(nameout, 0, {
                        'LACOSMIC': [True, 'TRUE if Laplacian cosmic ray rejection has been applied to the image']})
                    print '\n### cosmic ..... removed '
                except Exception, e:
                    print e
            else:
                ntt.util.updateheader(nameout, 0, {
                    'LACOSMIC': [False, 'TRUE if Laplacian cosmic ray rejection has been applied to the image']})
            try:
                ##########################
                # Astrometry: try each catalogue with progressively
                # tighter parameters until rms <= 2 in both axes.
                sexvec = ntt.efoscastrodef.sextractor(nameout)
                for cat in ['2mass', 'usnoa2', 'usnob1']:
                    rmsx3, rmsy3, num3, fwhmgess, ellgess, ccc, rasys3, decsys3, magsat3 = ntt.efoscastrodef.efoscastroloop(
                        [nameout], cat, False, 40, 40, 100, 'rxyscale', 100, 30, sexvec, True, 10, method)
                    if rmsx3 <= 2 and rmsy3 <= 2:
                        break
                if rmsx3 > 2 and rmsy3 > 2:
                    for cat in ['2mass', 'usnoa2', 'usnob1']:
                        rmsx3, rmsy3, num3, fwhmgess, ellgess, ccc, rasys3, decsys3, magsat3 = ntt.efoscastrodef.efoscastroloop(
                            [nameout], cat, False, 20, int(20), int(50), 'rxyscale', 100, 30, sexvec, True, 5, method)
                        if rmsx3 <= 2 and rmsy3 <= 2:
                            break
                if rmsx3 > 2 and rmsy3 > 2:
                    for cat in ['2mass', 'usnoa2', 'usnob1']:
                        rmsx3, rmsy3, num3, fwhmgess, ellgess, ccc, rasys3, decsys3, magsat3 = ntt.efoscastrodef.efoscastroloop(
                            [nameout], cat, False, int(10), int(10), int(25), 'rxyscale', 100, 30, sexvec, True, int(3), method)
                ##########################
                astrostring = str(rmsx3) + ' ' + str(rmsy3) + ' ' + str(num3)
                ntt.util.updateheader(
                    nameout, 0, {'ASTROMET': [astrostring, 'rmsx rmsy nstars']})
                print '\n### check astrometry: fine \n### rmsx rmsy nstars: ' + astrostring
            except Exception, e:
                print e
                rmsx3, rmsy3, num3, fwhmgess, ellgess, ccc, rasys3, decsys3, magsat3 = '', '', '', '', '', '', '', '', ''
                print '\n### problem with astrometry, do you have network ? '
            if fwhmgess and fwhmgess < 99:
                # Astrometry succeeded: store the measured quality figures.
                ntt.util.updateheader(nameout, 0,
                                      {'PSF_FWHM': [fwhmgess, 'Spatial resolution (arcsec)'],
                                       'ELLIPTIC': [ellgess, 'Average ellipticity of point sources'],
                                       'CRDER1': [(1 / sqrt(2.)) * float(rmsx3) * (1. / 3600.), 'Random error (degree)'],
                                       'CRDER2': [(1 / sqrt(2.)) * float(rmsy3) * (1. / 3600.), 'Random error (degree)'],
                                       'CUNIT1': ['deg', 'unit of the coord. trans.'],
                                       'CUNIT2': ['deg', 'unit of the coord. trans.'],
                                       'CSYER1': [rasys3, 'Systematic error (RA_m - Ra_ref)'],
                                       'CSYER2': [decsys3, 'Systematic error (DEC_m - DEC_ref)']})
            else:
                # Astrometry failed: write 9999. sentinels.
                ntt.util.updateheader(nameout, 0,
                                      {'PSF_FWHM': [9999., 'FHWM (arcsec) - computed with sectractor'],
                                       'ELLIPTIC': [9999., 'ellipticity of point sources (1-b/a)'],
                                       'CRDER1': [9999., 'Random error in axis 1'],
                                       'CRDER2': [9999., 'Random error in axis 2'],
                                       'CUNIT1': ['deg', 'unit of the coord. trans.'],
                                       'CUNIT2': ['deg', 'unit of the coord. trans.'],
                                       'CSYER1': [9999., 'Systematic error (RA_m - Ra_ref)'],
                                       'CSYER2': [9999., 'Systematic error (DEC_m - DEC_ref)']})
            try:
                result = ntt.efoscastrodef.zeropoint(
                    nameout, _system, method, False, False)
            except:
                result = ''
            if result:
                if os.path.isfile(re.sub('.fits', '.ph', nameout)):
                    if re.sub('.fits', '.ph', nameout) not in outputfile:
                        outputfile.append(
                            re.sub('.fits', '.ph', nameout))
                print '\n### zeropoint ..... done'
                for ll in result:
                    valore = '%3.3s %6.6s %6.6s' % (
                        str(ll), str(result[ll][1]), str(result[ll][0]))
                    print '### ', valore
                    ntt.util.updateheader(
                        nameout, 0, {'zp' + ll: [str(valore), '']})
            if magsat3:
                if readkey3(readhdr(nameout), 'FLUXCAL') == 'ABSOLUTE':
                    try:
                        # NOTE(review): float() is called with two args here
                        # ('PHOTZP' lands outside readkey3's parentheses), so
                        # this always raises and the except branch runs —
                        # TODO confirm intended float(readkey3(..., 'PHOTZP')).
                        ntt.util.updateheader(nameout, 0, {
                            'ABMAGSAT': [float(magsat3) + float(readkey3(readhdr(nameout)), 'PHOTZP'), 'Saturation limit for point sources (AB mags)']})
                    except:
                        ntt.util.updateheader(nameout, 0, {
                            'ABMAGSAT': [float(magsat3), 'Saturation limit for point sources (AB mags)']})
                else:
                    ntt.util.updateheader(nameout, 0, {
                        'ABMAGSAT': [float(magsat3), 'Saturation limit for point sources (AB mags)']})
            else:
                ntt.util.updateheader(nameout, 0, {'ABMAGSAT': [
                    9999., 'Saturation limit for point sources (AB mags)']})
            # 5-sigma limiting magnitude.
            maglim = ntt.util.limmag(nameout)
            if maglim:
                ntt.util.updateheader(nameout, 0, {'ABMAGLIM': [
                    maglim, '5-sigma limiting AB magnitude for point sources']})
            else:
                ntt.util.updateheader(nameout, 0, {'ABMAGLIM': [
                    9999., '5-sigma limiting AB magnitude for point sources']})
            # i-band frames additionally get a fringing correction.
            if readkey3(readhdr(nameout), 'filter') in ['i705']:
                try:
                    nameout, maskname = ntt.efoscphotredudef.fringing2(
                        nameout, fringingmask, _interactive, False)
                    if nameout not in outputfile:
                        outputfile.append(nameout)
                    if maskname not in outputfile:
                        outputfile.append(maskname)
                except:
                    ntt.util.writeinthelog(
                        'image ' + str(nameout) + ' probably corrupted\n', './logNTT.txt')
                    print '\n### problem with fringing correction'
def create_superflat(imdir, filters=["u", "g", "r", "i"]):
    # Build one normalized "superflat" per filter from the rc*fits frames in
    # the current directory: star-mask each frame, median-combine the masked
    # frames with IRAF imcombine, then divide by the combined image's mode.
    # NOTE(review): `imdir` is accepted but never used — frames are globbed
    # from the current working directory; TODO confirm.
    # NOTE: mutable default argument `filters` — harmless here since it is
    # only iterated, never mutated.
    #Locate images for each filter
    imlist = glob.glob("rc*fits")

    #Run sextractor to locate bright sources
    sexfiles = sextractor.run_sex(imlist, overwrite=False)
    maskfiles = []

    for i, im in enumerate(imlist):
        #Create a mask and store it int he mask directory
        maskfile = mask_stars(im, sexfiles[i])
        maskfiles.append(maskfile)
        # Record the bad-pixel-mask path in the frame's header.
        fitsutils.update_par(im, "BPM", os.path.relpath(maskfile))

    for filt in filters:
        fimlist = [im for im in imlist if fitsutils.get_par(
            im, "FILTER") == filt]
        fmasklist = [im for im in maskfiles if fitsutils.get_par(
            im, "FILTER") == filt]

        if len(fimlist) == 0:
            continue

        # IRAF @-list files naming the frames and their masks.
        fsfile = "lflat_%s" % filt
        msfile = "lmask_%s" % filt
        np.savetxt(fsfile, np.array(fimlist), fmt="%s")
        np.savetxt(msfile, np.array(fmasklist), fmt="%s")

        # Disabled alternative: build a master mask cube with astropy.
        '''masklist = []
        for m in fmasklist:
            hdulist = fits.open(m)
            data = hdulist[0].data
            masklist.append(data)
        masklist = np.array(masklist)
        hdu = fits.PrimaryHDU(masklist)
        hdulist = fits.HDUList([hdu])
        hdulist.writeto("mastermask_%s.fits"%filt)'''

        # Running IRAF
        iraf.noao(_doprint=0)
        iraf.imred(_doprint=0)
        iraf.ccdred(_doprint=0)

        # Zero out masked (star) pixels before combining.
        iraf.imarith("@" + fsfile, "*", "@" + msfile, "m_@" + fsfile)

        #Combine flats
        iraf.imcombine(input = "m_@"+fsfile, \
                       output = "superflat_%s.fits"%filt, \
                       combine = "median",\
                       scale = "mode", \
                       masktype="badvalue",\
                       maskvalue = 0)
        iraf.imstat("superflat_%s.fits" % filt,
                    fields="image,npix,mean,stddev,min,max,mode",
                    Stdout="Flat_stats")
        time.sleep(0.1)
        st = np.genfromtxt("Flat_stats", names=True, dtype=None)

        #Normalize flats
        iraf.imarith("superflat_%s.fits" % filt, "/", st["MODE"],
                     "superflat_%s_norm.fits" % filt)
def darkcom(imlist_name, camera='KL4040'):
    """
    Build master dark frames, one per exposure time, for KCT data.

    Run from the night directory (e.g. 201X-XX-XX/); raw darks named
    dark*.fit must sit in its dark/ subdirectory and a *_zero.fits master
    bias in the night directory.  Each output <date>_darkXXX.fits (XXX =
    exposure time) is copied to the night directory and to
    /data1/KCT/masterdark/.

    Parameters
    ----------
    imlist_name : str
        Kept for interface compatibility; the dark frames are actually
        selected with ``ls dark*.fit``.
    camera : str
        Camera tag, informational only.

    NOTE: iraf.chdir changes IRAF's directory — restart Python afterwards
    to avoid IRAF/Python working-directory confusion.

    History: 2018.03 created by G.Lim; ...; 2020.03.06 modified for KCT
    STX16803 (see repository history for the full changelog).
    """
    import glob
    import os, sys
    import numpy as np
    from pyraf import iraf
    from astropy.io import fits
    savedir = '/data1/KCT/'
    curdir = os.getcwd()
    # Night tag taken from the current directory name.
    curdate = curdir.split('/')[-1]
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    iraf.ccdred.setinst(instrume='camera',
                        directo='/iraf/iraf/noao/imred/ccdred/ccddb/',
                        query='q', review='no')
    iraf.chdir('./dark')
    print('zero subtraction...')
    os.system('ls dark*.fit > dark.list')
    # BUG FIX: `result` was a corrupted placeholder ('*****@*****.**').
    # 'z@dark.list' prefixes every output with 'z', which is what the
    # glob('zdark*.fit') below (and the 'z@'+list idiom in flatcom) expects.
    iraf.imarith(operand1='@dark.list', op='-', operand2='../*_zero.fits',
                 result='z@dark.list')
    zdark = glob.glob('zdark*.fit')
    # Collect the distinct exposure times present among the darks.
    allexptime = []
    for frame in zdark:
        allexptime.append(fits.getheader(frame)['exptime'])
    exptime = sorted(set(allexptime))
    output_name = ''
    for texp in exptime:
        print('Find images with exptime of ' + str(texp))
        imlist = [frame for frame in zdark
                  if fits.getheader(frame)['exptime'] == texp]
        print(imlist)
        output_name = curdate + '_dark' + str(int(texp)) + '.fits'
        input_name = output_name[:-5] + '.list'
        with open(input_name, 'w+') as f:
            for frame in imlist:
                f.write(frame + '\n')
        print('Darkcombine is running...')
        iraf.imstat('@' + input_name)
        iraf.darkcombine(input='@' + input_name, output=output_name,
                         combine='median', reject='minmax', process='no',
                         scale='none', ccdtype='')
        # Archive the master dark in the night dir and the shared store.
        os.system('/usr/bin/cp ' + output_name + ' ../')
        os.system('mkdir ' + savedir + 'masterdark')
        os.system('/usr/bin/cp ' + output_name + ' ' + savedir + 'masterdark/')
        os.system('/usr/bin/rm d*.list')
    iraf.chdir('../')
    iraf.dir('.')
    print('Output master ' + output_name + ' is created.')
def biascom(imlist_name, camera='STX16803'):
    """
    1. Description
    : This function makes a master bias image of KCT using Pyraf. Put bias images to 20XX-XX-XX/bias/ directory. Run this code on 20XX-XX-XX directory, then pyraf chdir task enter bias directory and makes process. Output image is zero.fits and it will be copied on upper directory. Due to iraf.chdir task, you should reset python when this code is finished in order to make confusion of current directory between iraf and python!

    2. Usage
    : Start on 2018-XX-XX directory. Make bias directory which contains each bias frame. Naming of each bias images should be zero*.fit. Then just use biascom()
    >>> biascom()

    3. History
    2018.03    Created by G.Lim.
    2018.12.17 Edited by G.Lim. Define SAO_biascom function.
    2019.02.07 Assign archive of masterbias in each date by G. Lim
    2020.03.01 Modified for KL4040 process
    2020.03.06 Modified for KCT STX16803 process
    """
    import glob
    import os, sys
    import numpy as np
    from pyraf import iraf
    from astropy.io import fits
    savedir = '/data1/KCT/'
    curdir = os.getcwd()
    '''
    yy = curdir.split('/')[-1].split('-')[0]
    mm = curdir.split('/')[-1].split('-')[1]
    dd = curdir.split('/')[-1].split('-')[2]
    curdate = yy+mm+dd
    '''
    # Night tag taken from the current directory name.
    curdate = curdir.split('/')[-1]
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    iraf.ccdred.setinst(instrume='camera',
                        directo='/iraf/iraf/noao/imred/ccdred/ccddb/',
                        query='q', review='no')
    iraf.chdir('./bias')
    input_name = 'bias.list'
    output_name = curdate + '_zero.fits'
    #os.system('ls cal*bias.fit > '+input_name)
    calibrations = glob.glob(imlist_name)
    # Keep only frames whose header marks them as bias.
    f = open(input_name, 'w+')
    for i in range(len(calibrations)):
        hdr = fits.getheader(calibrations[i])
        IMAGETYP = hdr['IMAGETYP']
        if IMAGETYP == 'Bias Frame':
            f.write(calibrations[i] + '\n')
    f.close()
    print('Zerocombine is running...')
    iraf.zerocombine(input='@' + input_name, output=output_name,
                     combine='median', reject='minmax', process='no',
                     scale='none', ccdtype='')
    print('Output master ' + output_name + ' is created.')
    # Copy the master bias to the night dir and the shared archive.
    os.system('/usr/bin/cp ' + output_name + ' ../')
    os.system('mkdir ' + savedir + 'masterbias')
    os.system('/usr/bin/cp ' + output_name + ' ' + savedir + 'masterbias/')
    iraf.chdir('../')
    iraf.dir('.')
def flatcom(camera='STX16803', flattype='sky', dark=False):
    """
    1. Description
    : This function makes master-normalised images of KCT using Pyraf. Put flat images to 20XX-XX-XX/skyflat/ directory. Run this code on 20XX-XX-XX directory, then pyraf chdir task enter skyflat, directory and makes process. If dark=True, dark subtraction will perform on flat images. Use this keyword when you think dark subtraction is needed for flat images. If not, only bias subtraction will be perfromed. And then flatcombine and normalizing will be performed. Output image is nflatX.YYY.fits (X is filter and YYY is sky or dome. This function will classify each exposure of frames!) and they will be copied on upper directory. Due to iraf.chdir task, you should reset python when this code is finished in order to make confusion of current directory between iraf and python!

    2. Usage
    : Start on 20XX-XX-XX directory. Make skyflat or domeflat directory which contains each flat frame. Naming of each flat image should be *flat*.fit. And domeflat naming is Domeflat*.fit. Then just use flatcom().
    >>> flatcom('sky') --> Use skyflat
    >>> flatcom('dome') --> Use domeflat
    *Default configuration is skyflat.

    3. History
    2018.03    Created by G. Lim.
    2018.12.20 Edited by G. Lim. Define SAO_flatcom function.
    2018.12.28 Edited by G. Lim. Add bias or dark keyword. Join function is used when performing imarith, combine tasks.
    2019.02.07 Assign archive of masterflat in each date by G. Lim
    2020.03.01 Remove process keyword from STX16803.
    2020.03.06 Modified for KCT STX16803 process
    """
    import glob
    import os, sys
    import itertools
    import numpy as np
    from pyraf import iraf
    from astropy.io import fits
    from astropy.io import ascii
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    iraf.ccdred.setinst(instrume='camera',
                        directo='/iraf/iraf/noao/imred/ccdred/ccddb/',
                        query='q', review='no')
    curdir = os.getcwd()
    '''
    yy = curdir.split('/')[-1].split('-')[0]
    mm = curdir.split('/')[-1].split('-')[1]
    dd = curdir.split('/')[-1].split('-')[2]
    curdate = yy+mm+dd
    '''
    # Night tag taken from the current directory name.
    curdate = curdir.split('/')[-1]
    savedir = '/data1/KCT/'
    if flattype == 'dome':
        iraf.chdir('./domeflat')
    elif flattype == 'sky':
        iraf.chdir('./skyflat')
    flat = glob.glob('*flat*.fit')
    flat.sort()
    input_name = 'flat.list'
    k = 0
    f = open(input_name, 'w+')
    for k in range(len(flat)):
        f.write(flat[k] + '\n')
    f.close()
    print('zero subtraction with ' + flattype + 'flat images...')
    # 'z@' prefixes each output frame with 'z' (zero-subtracted).
    iraf.imarith(operand1='@' + input_name, op='-',
                 operand2='../*_zero.fits', result='z@' + input_name)
    if dark == True:
        # Optional dark subtraction, grouped by exposure time.
        zflat = glob.glob('z*flat*.fit')
        i = 0
        allexptime = []
        for i in range(len(zflat)):
            hdr = fits.getheader(zflat[i])
            allexptime.append(hdr['exptime'])
        expset = set(allexptime)
        exptime = list(sorted(expset))
        i = 0
        for i in range(len(exptime)):
            print('Find images with exptime of ' + str(int(exptime[i])))
            imlist = []
            j = 0
            for j in range(len(zflat)):
                hdr = fits.getheader(zflat[j])
                if hdr['exptime'] == exptime[i]:
                    imlist.append(zflat[j])
                else:
                    pass
            print(imlist)
            imlist.sort()
            input_name = 'zflat.list'
            k = 0
            f = open(input_name, 'w+')
            for k in range(len(imlist)):
                f.write(imlist[k] + '\n')
            f.close()
            # Subtract the matching-exposure master dark ('d' prefix).
            iraf.imarith(operand1='@' + input_name, op='-',
                         operand2='../*_dark' + str(int(exptime[i])) + '.fits',
                         result='d@' + input_name)
        #iraf.imarith(operand1=darkjoin, op='-', operand2='../dark'+str(int(exptime[i]))+'.fits', result=outdarkjoin)
        print('Dark subtracted flat images are created.')
    # Flat combine
    if dark == True:
        calflat = glob.glob('dz*flat*.fit')
    elif dark == False:
        calflat = glob.glob('z*flat*.fit')
    calflat.sort()
    # Group the calibrated flats by filter and combine each group.
    allfilter = []
    i = 0
    for i in range(len(calflat)):
        hdr = fits.getheader(calflat[i])
        allfilter.append(hdr['filter'])
    filterset = set(allfilter)
    infilter = list(sorted(filterset))
    i = 0
    for i in range(len(infilter)):
        print('Find images with filter of ' + str(infilter[i]))
        calimlist = []
        for j in range(len(calflat)):
            hdr = fits.getheader(calflat[j])
            if hdr['filter'] == infilter[i]:
                calimlist.append(calflat[j])
            else:
                pass
        print(calimlist)
        calimlist.sort()
        input_name = str(infilter[i]) + 'flat.list'
        k = 0
        f = open(input_name, 'w+')
        for k in range(len(calimlist)):
            f.write(calimlist[k] + '\n')
        f.close()
        output_name = input_name[:-5] + '.fits'
        iraf.flatcombine(input='@' + input_name, output=output_name,
                         combine='average', reject='crreject', process='no',
                         scale='mode', ccdtype='', lsigma='3.', hsigma='3.')
        print(output_name + ' is created. Normalizing...')
        # Normalize by the mean so the master flat averages to ~1.
        data, newhdr = fits.getdata(output_name, header=True)
        x = np.mean(data)
        nimage = data / x
        newflat_name = curdate + '_n' + \
            str(infilter[i]) + 'flat.' + flattype + '.fits'
        fits.writeto(newflat_name, nimage, header=newhdr, overwrite=True)
        # Archive the normalized flat locally and in the shared store.
        os.system('/usr/bin/cp ' + newflat_name + ' ../')
        os.system('mkdir ' + savedir + 'masterflat_' + infilter[i] + '/')
        os.system('/usr/bin/cp ' + newflat_name + ' ' +
                  savedir + 'masterflat_' + infilter[i] + '/')
    print('Normalised master flats are created.')
    iraf.imstat(images='*n?flat.' + flattype + '.fits')
    os.system('/usr/bin/rm *.list ?flat.fits')
    iraf.chdir('../')
    iraf.dir('./')
def objpre(sci_list='*-00*.fit'):
    """
    1. Description
    : This function applies master calibration images to science frames,
      including bias (zero) subtraction, dark subtraction and flat fielding.
      Output frames are prefixed cumulatively: z (zero), dz (dark), fdz (flat).
    2. Usage
    : objpre('*-00*.fit')
    3. History
    2018.03    Created by G. Lim
    2019.02.07 Change name of master calibration frames in each date by G. Lim
    2020.03.01 Bias subtraction is added for KL4040.
    2020.03.06 Modified for KCT STX16803 process
    """
    import glob
    import os, sys
    import itertools
    import numpy as np
    from pyraf import iraf
    from astropy.io import fits
    from astropy.io import ascii
    curdir = os.getcwd()
    # working directory name is used as the date tag of the master frames
    curdate = curdir.split('/')[-1]
    savedir = '/data1/KCT/'
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    iraf.ccdred.setinst(instrume='camera',
                        directo='/iraf/iraf/noao/imred/ccdred/ccddb/',
                        query='q', review='no')
    # --- Bias (zero) subtraction -------------------------------------------
    os.system('ls ' + sci_list + ' > sci.list')
    print('zero subtraction with skyflat images...')
    # BUGFIX: the output template was a corrupted placeholder string
    # ('*****@*****.**'); zero-subtracted frames must be written with a 'z'
    # prefix so that glob.glob('z' + sci_list) below can find them.
    iraf.imarith(operand1='@sci.list', op='-', operand2='./*_zero.fits',
                 result='z@sci.list')
    obj = glob.glob('z' + sci_list)
    # --- Dark subtraction (grouped by exposure time) -----------------------
    allexptime = []
    for i in range(len(obj)):
        hdr = fits.getheader(obj[i])
        allexptime.append(hdr['exptime'])
    exptime = list(sorted(set(allexptime)))
    for i in range(len(exptime)):
        print('Find images with exptime of ' + str(exptime[i]))
        imlist = []
        for j in range(len(obj)):
            hdr = fits.getheader(obj[j])
            if hdr['exptime'] == exptime[i]:
                imlist.append(obj[j])
        print(imlist)
        imlist.sort()
        print('Creating object list for dark subtraction...')
        f = open("obj" + str(int(exptime[i])) + ".list", 'w+')
        for im in range(len(imlist)):
            f.write(imlist[im] + "\n")
        f.close()
        print('dark subtraction with dark' + str(int(exptime[i])) + '.fits')
        # master dark is expected to be named like 20YYMMDD...dark<EXP>.fits
        input_dark = glob.glob('20*dark' + str(int(exptime[i])) + '.fits')[0]
        iraf.imarith(operand1='@obj' + str(int(exptime[i])) + '.list', op='-',
                     operand2=input_dark,
                     result='d@obj' + str(int(exptime[i])) + '.list')
    print('dark subtracted object images are created.')
    # --- Flat fielding (grouped by filter) ---------------------------------
    dobj = ['d' + x for x in obj]
    dobj.sort()
    allfilter = []
    for i in range(len(dobj)):
        hdr = fits.getheader(dobj[i])
        allfilter.append(hdr['filter'])
    infilter = list(sorted(set(allfilter)))
    for i in range(len(infilter)):
        print('Find images with filter of ' + str(infilter[i]))
        imlist = []
        for j in range(len(dobj)):
            hdr = fits.getheader(dobj[j])
            if hdr['filter'] == infilter[i]:
                imlist.append(dobj[j])
        print(imlist)
        g = open("obj" + infilter[i] + ".list", 'w+')
        for im in range(len(imlist)):
            g.write(imlist[im] + "\n")
        g.close()
        print('Performing flat fielding...')
        # normalised master flat: 2*n<FILTER>flat.<flattype>.fits;
        # flattype (dome/sky) is recovered from the file name itself
        nflats = glob.glob('2*n' + str(infilter[i]) + 'flat.*.fits')[0]
        flattype = nflats.split('.')[1]
        print(nflats)
        iraf.imarith(operand1='@obj' + infilter[i] + '.list', op='/',
                     operand2=nflats,
                     result='f@obj' + infilter[i] + '.list')
    print('Flat fielding is finished. Check the images.')
def sensfunction(standardfile, _function, _order, _interactive):
    # Compute a spectro-photometric sensitivity function from a reduced
    # standard-star spectrum using IRAF specred.standard + specred.sensfunc.
    #   standardfile : extracted 1D spectrum of the standard star (FITS)
    #   _function    : fitting function name for sensfunc (e.g. 'spline3')
    #   _order       : order of the fitting function
    #   _interactive : 'yes'/'no' string, forwarded to the IRAF tasks
    # Returns the name of the sensitivity-function FITS file.
    # NOTE: this is Python 2 code (octal-style `01` literals, raw_input).
    import re
    import os
    import sys
    import ntt
    import datetime
    try:
        import pyfits  # added later
    except:
        from astropy.io import fits as pyfits
    from pyraf import iraf
    import numpy as np
    # MJD of 2012-01-01 is 55927; add the elapsed days to stamp the output name
    MJDtoday = 55927 + (datetime.date.today() - datetime.date(2012, 01, 01)).days
    iraf.noao(_doprint=0, Stdout=0)
    iraf.imred(_doprint=0, Stdout=0)
    iraf.specred(_doprint=0, Stdout=0)
    # reset the tasks we are about to configure
    toforget = ['specred.scopy', 'specred.sensfunc', 'specred.standard']
    for t in toforget:
        iraf.unlearn(t)
    iraf.specred.scopy.format = 'multispec'
    iraf.specred.verbose = 'no'
    hdrs = ntt.util.readhdr(standardfile)
    # output name encodes night, grism, filter, standard name and today's MJD
    try:
        _outputsens = 'sens_' + str(ntt.util.readkey3(hdrs, 'date-night')) + '_' + \
            str(ntt.util.readkey3(hdrs, 'grism')) + '_' + str(ntt.util.readkey3(hdrs, 'filter')) + '_' + \
            re.sub('.dat', '', ntt.util.readkey3(
                hdrs, 'stdname')) + '_' + str(MJDtoday)
    except:
        sys.exit('Error: missing header -stdname- in standard ' +
                 str(standardfile) + ' ')
    _outputsens = ntt.util.name_duplicate(standardfile, _outputsens, '')
    # an existing sensitivity file is deleted automatically in batch mode,
    # or after confirmation in interactive mode
    if os.path.isfile(_outputsens):
        if _interactive.lower() != 'yes':
            ntt.util.delete(_outputsens)
        else:
            answ = raw_input(
                'sensitivity function already computed, do you want to do it again [[y]/n] ? ')
            if not answ:
                answ = 'y'
            if answ.lower() in ['y', 'yes']:
                ntt.util.delete(_outputsens)
    if not os.path.isfile(_outputsens):
        # 'direc$' is an IRAF environment variable pointing into the ntt package
        iraf.set(direc=ntt.__path__[0] + '/')
        _caldir = 'direc$standard/MAB/'
        _extinctdir = 'direc$standard/extinction/'
        _observatory = 'lasilla'
        _extinction = 'lasilla2.txt'
        # calibration files in caldir are named m<stdname>
        refstar = 'm' + \
            re.sub('.dat', '', pyfits.open(standardfile)
                   [0].header.get('stdname'))
        _airmass = ntt.util.readkey3(hdrs, 'airmass')
        _exptime = ntt.util.readkey3(hdrs, 'exptime')
        _outputstd = 'std_' + str(ntt.util.readkey3(hdrs, 'grism')) + '_' + \
            str(ntt.util.readkey3(hdrs, 'filter')) + '.fits'
        ntt.util.delete(_outputstd)
        ntt.util.delete(_outputsens)
        # step 1: tabulate observed vs. catalogued fluxes of the standard
        iraf.specred.standard(input=standardfile, output=_outputstd,
                              extinct=_extinctdir + _extinction,
                              caldir=_caldir, observa=_observatory,
                              star_nam=refstar, airmass=_airmass,
                              exptime=_exptime, interac=_interactive)
        # step 2: fit the sensitivity function through those points
        iraf.specred.sensfunc(standard=_outputstd, sensitiv=_outputsens,
                              extinct=_extinctdir + _extinction,
                              ignorea='yes', observa=_observatory,
                              functio=_function, order=_order,
                              interac=_interactive)
        # rewrite the sensitivity image as float32, carrying over the
        # ORIGINAL standard-star header (hdr, not hdr1) — deliberate per
        # the "added later" annotations
        data, hdr = pyfits.getdata(standardfile, 0, header=True)  # added later
        data1, hdr1 = pyfits.getdata(_outputsens, 0, header=True)  # added later
        ntt.util.delete(_outputsens)  # added later
        pyfits.writeto(_outputsens, np.float32(data1), hdr)  # added later
    return _outputsens
def efoscspec1Dredu(files, _interactive, _ext_trace, _dispersionline,
                    liststandard, listatmo0, _automaticex, _verbose=False):
    # EFOSC 1D spectroscopic reduction driver (Python 2 code).
    # For each instrumental setup (grism, filter, slit) it: extracts 1D
    # spectra, builds/locates a sensitivity function from a standard star,
    # flux-calibrates the science spectra, applies a telluric correction,
    # exports ASCII spectra, and stamps ESO phase-3 header keywords.
    # Returns (list_of_output_files, logfile_name).
    import ntt
    try:
        import pyfits
    except:
        from astropy.io import fits as pyfits
    import re
    import string
    import sys
    import os
    import numpy as np
    os.environ["PYRAF_BETA_STATUS"] = "1"
    # 'direc$' paths are resolved by IRAF after iraf.set(direc=...) below
    _extinctdir = 'direc$standard/extinction/'
    _extinction = 'lasilla2.txt'
    _observatory = 'lasilla'
    import datetime
    now = datetime.datetime.now()
    datenow = now.strftime('20%y%m%d%H%M')
    # MJD of 2012-01-01 is 55927
    MJDtoday = 55927 + (datetime.date.today() -
                        datetime.date(2012, 01, 01)).days
    dv = ntt.dvex()
    scal = np.pi / 180.
    _gain = ntt.util.readkey3(ntt.util.readhdr(
        re.sub('\n', '', files[0])), 'gain')
    _rdnoise = ntt.util.readkey3(ntt.util.readhdr(
        re.sub('\n', '', files[0])), 'ron')
    std, rastd, decstd, magstd = ntt.util.readstandard(
        'standard_efosc_mab.txt')
    # classify every input frame as science object or standard star and
    # group by (grism, filter, slit) setup
    objectlist = {}
    for img in files:
        hdr = ntt.util.readhdr(img)
        img = re.sub('\n', '', img)
        ntt.util.correctcard(img)
        _ra = ntt.util.readkey3(hdr, 'RA')
        _dec = ntt.util.readkey3(hdr, 'DEC')
        _object = ntt.util.readkey3(hdr, 'object')
        _grism = ntt.util.readkey3(hdr, 'grism')
        _filter = ntt.util.readkey3(hdr, 'filter')
        _slit = ntt.util.readkey3(hdr, 'slit')
        # angular distance (arcsec) from the pointing to every catalogued
        # standard star; < 100" means this frame IS a standard observation
        dd = np.arccos(
            np.sin(_dec * scal) * np.sin(decstd * scal) +
            np.cos(_dec * scal) * np.cos(decstd * scal) * np.cos(
                (_ra - rastd) * scal)) * ((180 / np.pi) * 3600)
        if min(dd) < 100:
            _type = 'stdsens'
        else:
            _type = 'obj'
        if min(dd) < 100:
            ntt.util.updateheader(img, 0,
                                  {'stdname': [std[np.argmin(dd)], '']})
            ntt.util.updateheader(
                img, 0, {'magstd': [float(magstd[np.argmin(dd)]), '']})
        if _type not in objectlist:
            objectlist[_type] = {}
        if (_grism, _filter, _slit) not in objectlist[_type]:
            objectlist[_type][_grism, _filter, _slit] = [img]
        else:
            objectlist[_type][_grism, _filter, _slit].append(img)
    from pyraf import iraf
    iraf.noao(_doprint=0, Stdout=0)
    iraf.imred(_doprint=0, Stdout=0)
    iraf.specred(_doprint=0, Stdout=0)
    iraf.imutil(_doprint=0, Stdout=0)
    toforget = ['imutil.imcopy', 'specred.sarith', 'specred.standard']
    for t in toforget:
        iraf.unlearn(t)
    iraf.specred.verbose = 'no'
    iraf.specred.dispaxi = 2
    iraf.set(direc=ntt.__path__[0] + '/')
    sens = {}
    outputfile = []
    # drive the loop over science setups when present, else standards only
    if 'obj' in objectlist.keys():
        tpe = 'obj'
    elif 'stdsens' in objectlist.keys():
        tpe = 'stdsens'
    else:
        sys.exit('error: no objects and no standards in the list')
    for setup in objectlist[tpe]:
        extracted = []
        listatmo = []
        if setup not in sens:
            sens[setup] = []
        if tpe == 'obj':
            print '\n### setup= ', setup, '\n### objects= ', objectlist['obj'][
                setup], '\n'
            # --- extract every science spectrum of this setup ---
            for img in objectlist['obj'][setup]:
                print '\n\n### next object= ', img, ' ', ntt.util.readkey3(
                    ntt.util.readhdr(img), 'object'), '\n'
                # re-extract if a previous extraction was only 'Rapid' quality
                if os.path.isfile(re.sub('.fits', '_ex.fits', img)):
                    if ntt.util.readkey3(
                            ntt.util.readhdr(re.sub('.fits', '_ex.fits', img)),
                            'quality') == 'Rapid':
                        ntt.util.delete(re.sub('.fits', '_ex.fits', img))
                imgex = ntt.util.extractspectrum(img, dv, _ext_trace,
                                                 _dispersionline, _interactive,
                                                 'obj',
                                                 automaticex=_automaticex)
                if not os.path.isfile(imgex):
                    sys.exit('### error, extraction not computed')
                # check the wavelength solution only if no shift was measured
                # yet (shift missing/None, as opposed to a genuine 0.0)
                if not ntt.util.readkey3(ntt.util.readhdr(imgex), 'shift') and \
                        ntt.util.readkey3(ntt.util.readhdr(imgex), 'shift') != 0.0:
                    ntt.efoscspec1Ddef.checkwavestd(imgex, _interactive)
                extracted.append(imgex)
                if imgex not in outputfile:
                    outputfile.append(imgex)
                ntt.util.updateheader(
                    imgex, 0, {'FILETYPE': [22107, 'extracted 1D spectrum ']})
                ntt.util.updateheader(
                    imgex, 0, {
                        'PRODCATG': [
                            'SCIENCE.' + ntt.util.readkey3(
                                ntt.util.readhdr(imgex), 'tech').upper(),
                            'Data product category'
                        ]
                    })
                ntt.util.updateheader(imgex, 0,
                                      {'TRACE1': [img, 'Originating file']})
                # keep the IRAF aperture database file with the products
                if os.path.isfile('database/ap' + re.sub('_ex.fits', '', imgex)):
                    if 'database/ap' + re.sub('_ex.fits', '', imgex) not in outputfile:
                        outputfile.append('database/ap' +
                                          re.sub('_ex.fits', '', imgex))
            print '\n### all object with this setup extracted\n'
        # --- choose where the standard star comes from ---
        if liststandard:
            standardlist = liststandard
            _type = 'stdfromdreducer'
        else:
            try:
                standardlist = objectlist['stdsens'][setup]
                _type = 'stdsens'
            except:
                standardlist = ''
                _type = ''
        if _type == 'stdfromdreducer' and len(extracted) >= 1:
            # standard supplied by the 2D reducer
            _outputsens2 = ntt.util.searchsens(extracted[0], standardlist)[0]
            print '\n### using standard from reducer ' + str(_outputsens2)
        elif _type not in ['stdsens', 'stdfromdreducer'
                           ] and len(extracted) >= 1:
            # no standard at all: fall back to the archive
            _outputsens2 = ntt.util.searchsens(extracted[0], '')[0]
            os.system('cp ' + _outputsens2 + ' .')
            _outputsens2 = string.split(_outputsens2, '/')[-1]
            print '\n### no standard in the list, using standard from archive'
        else:
            # --- extract and telluric-process each standard of this setup ---
            for simg in standardlist:
                print '\n### standard for setup ' + \
                    str(setup) + ' = ', simg, ' ', ntt.util.readkey3(
                        ntt.util.readhdr(simg), 'object'), '\n'
                simgex = ntt.util.extractspectrum(simg, dv, False, False,
                                                  _interactive, 'std',
                                                  automaticex=_automaticex)
                ntt.util.updateheader(
                    simgex, 0,
                    {'FILETYPE': [22107, 'extracted 1D spectrum']})
                ntt.util.updateheader(
                    simgex, 0, {
                        'PRODCATG': [
                            'SCIENCE.' + ntt.util.readkey3(
                                ntt.util.readhdr(simgex), 'tech').upper(),
                            'Data product category'
                        ]
                    })
                ntt.util.updateheader(simgex, 0,
                                      {'TRACE1': [simg, 'Originating file']})
                if not ntt.util.readkey3(ntt.util.readhdr(simgex), 'shift') and \
                        ntt.util.readkey3(ntt.util.readhdr(simgex), 'shift') != 0.0:
                    ntt.efoscspec1Ddef.checkwavestd(simgex, _interactive)
                atmofile = ntt.efoscspec1Ddef.telluric_atmo(
                    simgex)  # atmo file2
                ntt.util.updateheader(atmofile, 0,
                                      {'TRACE1': [simgex, 'Originating file']})
                ntt.util.updateheader(
                    atmofile, 0,
                    {'FILETYPE': [21211, 'telluric correction 1D spectrum ']})
                if tpe != 'obj' and atmofile not in outputfile:
                    outputfile.append(atmofile)
                if not listatmo0:
                    listatmo.append(atmofile)
                sens[setup].append(simgex)
                if simgex not in outputfile:
                    outputfile.append(simgex)
                # Gr13 + free filter suffers second-order contamination;
                # a GG495-filtered companion frame is extracted to correct it
                if setup[0] == 'Gr13' and setup[1] == 'Free':
                    if os.path.isfile(re.sub('Free', 'GG495', simg)):
                        print '\n### extract standard frame with blocking filter to correct for second order contamination\n'
                        simg2 = re.sub('Free', 'GG495', simg)
                        simgex2 = ntt.util.extractspectrum(
                            simg2, dv, False, False, _interactive, 'std',
                            automaticex=_automaticex)
                        ntt.util.updateheader(
                            simgex2, 0,
                            {'FILETYPE': [22107, 'extracted 1D spectrum']})
                        ntt.util.updateheader(
                            simgex2, 0, {
                                'PRODCATG': [
                                    'SCIENCE.' + ntt.util.readkey3(
                                        ntt.util.readhdr(simgex2),
                                        'tech').upper(),
                                    'Data product category'
                                ]
                            })
                        if not ntt.util.readkey3(ntt.util.readhdr(simgex2), 'shift') and \
                                ntt.util.readkey3(ntt.util.readhdr(simgex2), 'shift') != 0.0:
                            ntt.efoscspec1Ddef.checkwavestd(
                                simgex2, _interactive)
                        ntt.util.updateheader(
                            simgex2, 0, {'TRACE1': [simg2, 'Originating file']})
        print '\n### standard available: ', sens[setup]
        # --- pick one standard per setup (ask the user if several) ---
        if tpe == 'obj':
            if len(sens[setup]) > 1:
                goon = 'no'
                while goon != 'yes':
                    stdused = raw_input(
                        '\n### more than one standard for this setup, which one do you want to use ['
                        + sens[setup][0] + '] ?')
                    if not stdused:
                        stdused = sens[setup][0]
                    if os.path.isfile(stdused):
                        goon = 'yes'
            else:
                stdused = sens[setup][0]
            stdvec = [stdused]
        else:
            stdvec = sens[setup]
        # --- build the sensitivity function from each chosen standard ---
        for stdused in stdvec:
            stdusedclean = re.sub('_ex', '_clean', stdused)
            ntt.util.delete(stdusedclean)
            # divide the standard by its telluric model before fitting
            # (atmofile carries over from the extraction loop above)
            iraf.specred.sarith(input1=stdused, op='/', input2=atmofile,
                                output=stdusedclean, format='multispec')
            _outputsens2 = ntt.efoscspec1Ddef.sensfunction(
                stdusedclean, 'spline3', 16, _interactive)
            ntt.util.updateheader(
                _outputsens2, 0,
                {'FILETYPE': [21212, 'sensitivity function']})
            ntt.util.updateheader(
                _outputsens2, 0, {'TRACE1': [stdused, 'Originating file']})
            # second-order correction of the sensitivity for Gr13 + Free
            if setup[0] == 'Gr13' and setup[1] == 'Free':
                if os.path.isfile(re.sub('Free', 'GG495', stdused)):
                    print '\n### compute sensitivity function of grim 13 with blocking filter ' \
                        'to correct for second order contamination \n'
                    stdused2 = re.sub('Free', 'GG495', stdused)
                    if not ntt.util.readkey3(ntt.util.readhdr(stdused2),
                                             'STDNAME'):
                        ntt.util.updateheader(
                            stdused2, 0, {
                                'STDNAME': [
                                    ntt.util.readkey3(
                                        ntt.util.readhdr(stdused),
                                        'STDNAME'), ''
                                ]
                            })
                    atmofile2 = ntt.efoscspec1Ddef.telluric_atmo(
                        stdused2)  # atmo file2
                    stdusedclean2 = re.sub('_ex', '_clean', stdused2)
                    ntt.util.delete(stdusedclean2)
                    iraf.specred.sarith(input1=stdused2, op='/',
                                        input2=atmofile2,
                                        output=stdusedclean2,
                                        format='multispec')
                    _outputsens3 = ntt.efoscspec1Ddef.sensfunction(
                        stdusedclean2, 'spline3', 16, _interactive)
                    ntt.util.updateheader(
                        _outputsens3, 0,
                        {'FILETYPE': [21212, 'sensitivity function']})
                    ntt.util.updateheader(
                        _outputsens3, 0,
                        {'TRACE1': [stdused2, 'Originating file']})
                    _outputsens2 = correctsens(_outputsens2, _outputsens3)
        if _outputsens2 not in outputfile:
            outputfile.append(_outputsens2)
        # --- flux-calibrate the science frames of this setup ---
        if _outputsens2 and tpe == 'obj':
            ####################################################
            for img in objectlist['obj'][setup]:  # flux calibrate 2d images
                imgd = fluxcalib2d(img, _outputsens2)
                ntt.util.updateheader(
                    imgd, 0, {
                        'FILETYPE':
                        [22209, '2D wavelength and flux calibrated spectrum ']
                    })
                ntt.util.updateheader(imgd, 0,
                                      {'TRACE1': [img, 'Originating files']})
                iraf.hedit(imgd, 'PRODCATG', delete='yes', update='yes',
                           verify='no')
                if imgd not in outputfile:
                    outputfile.append(imgd)
            ####################################################
            # flux calib in the standard way
            if not listatmo and listatmo0:
                listatmo = listatmo0[:]
            for _imgex in extracted:
                _airmass = ntt.util.readkey3(ntt.util.readhdr(_imgex),
                                             'airmass')
                _exptime = ntt.util.readkey3(ntt.util.readhdr(_imgex),
                                             'exptime')
                _imgf = re.sub('_ex.fits', '_f.fits', _imgex)
                ntt.util.delete(_imgf)
                qqq = iraf.specred.calibrate(input=_imgex, output=_imgf,
                                             sensiti=_outputsens2,
                                             extinct='yes', flux='yes',
                                             extinction=_extinctdir +
                                             _extinction,
                                             observatory=_observatory,
                                             airmass=_airmass, ignorea='yes',
                                             exptime=_exptime, fnu='no')
                hedvec = {
                    'SENSFUN': [_outputsens2, ''],
                    'FILETYPE':
                    [22208, '1D wavelength and flux calibrated spectrum', ''],
                    # 'SNR':[ntt.util.StoN(_imgf,50),'Average signal to noise ratio per pixel'],
                    'SNR': [
                        ntt.util.StoN2(_imgf, False),
                        'Average signal to noise ratio per pixel'
                    ],
                    'BUNIT':
                    ['erg/cm2/s/Angstrom', 'Physical unit of array values'],
                    'TRACE1': [_imgex, 'Originating file'],
                    'ASSON1': [
                        re.sub('_f.fits', '_2df.fits', _imgf),
                        'Name of associated file'
                    ],
                    'ASSOC1':
                    ['ANCILLARY.2DSPECTRUM', 'Category of associated file']
                }
                ntt.util.updateheader(_imgf, 0, hedvec)
                if _imgf not in outputfile:
                    outputfile.append(_imgf)
                # --- telluric correction of the calibrated spectrum ---
                if listatmo:
                    atmofile = ntt.util.searcharc(_imgex, listatmo)[0]
                    if atmofile:
                        _imge = re.sub('_f.fits', '_e.fits', _imgf)
                        ntt.util.delete(_imge)
                        iraf.specred.sarith(input1=_imgf, op='/',
                                            input2=atmofile, output=_imge,
                                            w1='INDEF', w2='INDEF',
                                            format='multispec')
                        # carry the extra multispec bands (sky, error, ...)
                        # over unchanged; they may be absent, hence try/except
                        try:
                            iraf.imutil.imcopy(input=_imgf + '[*,1,2]',
                                               output=_imge + '[*,1,2]',
                                               verbose='no')
                        except:
                            pass
                        try:
                            iraf.imutil.imcopy(input=_imgf + '[*,1,3]',
                                               output=_imge + '[*,1,3]',
                                               verbose='no')
                        except:
                            pass
                        try:
                            iraf.imutil.imcopy(input=_imgf + '[*,1,4]',
                                               output=_imge + '[*,1,4]',
                                               verbose='no')
                        except:
                            pass
                        if _imge not in outputfile:
                            outputfile.append(_imge)
                        ntt.util.updateheader(
                            _imge, 0, {
                                'FILETYPE': [
                                    22210,
                                    '1D, wave, flux calib, telluric corr.'
                                ]
                            })
                        if atmofile not in outputfile:
                            outputfile.append(atmofile)
                        ntt.util.updateheader(_imge, 0,
                                              {'ATMOFILE': [atmofile, '']})
                        ntt.util.updateheader(
                            _imge, 0, {'TRACE1': [_imgf, 'Originating file']})
                        imgin = _imge
                    else:
                        imgin = _imgf
                else:
                    imgin = _imgf
                # export the first band as ASCII
                imgasci = re.sub('.fits', '.asci', imgin)
                ntt.util.delete(imgasci)
                iraf.onedspec(_doprint=0, Stdout=0)
                iraf.onedspec.wspectext(imgin + '[*,1,1]', imgasci,
                                        header='no')
                if imgasci not in outputfile:
                    outputfile.append(imgasci)
    # --- ESO phase-3 keyword housekeeping on every FITS product ---
    print '\n### adding keywords for phase 3 ....... '
    for img in outputfile:
        if str(img)[-5:] == '.fits':
            try:
                ntt.util.phase3header(img)  # phase 3 definitions
                ntt.util.updateheader(img, 0, {'quality': ['Final', '']})
            except:
                print 'Warning: ' + img + ' is not a fits file'
            # old pyfits (< 3.1) needs the explicit 'HIERARCH ' prefix
            try:
                if int(re.sub('\.', '', str(pyfits.__version__))[:2]) <= 30:
                    aa = 'HIERARCH '
                else:
                    aa = ''
            except:
                aa = ''
            imm = pyfits.open(img, mode='update')
            hdr = imm[0].header
            if aa + 'ESO DPR CATG' in hdr:
                hdr.pop(aa + 'ESO DPR CATG')
            if aa + 'ESO DPR TECH' in hdr:
                hdr.pop(aa + 'ESO DPR TECH')
            if aa + 'ESO DPR TYPE' in hdr:
                hdr.pop(aa + 'ESO DPR TYPE')
            imm.flush()
            imm.close()
    print outputfile
    # write the raw-file log listing the originating arcfiles
    reduceddata = ntt.rangedata(outputfile)
    f = open(
        'logfile_spec1d_' + str(reduceddata) + '_' + str(datenow) +
        '.raw.list', 'w')
    for img in outputfile:
        try:
            f.write(
                ntt.util.readkey3(ntt.util.readhdr(img), 'arcfile') + '\n')
        except:
            pass
    f.close()
    return outputfile, 'logfile_spec1d_' + str(reduceddata) + '_' + str(
        datenow) + '.raw.list'
def export_observation_to_txt(fits_path, txt_path):
    # Export each echelle order (1..31) of a multispec FITS spectrum to a
    # separate two-column text file named <txt_path>_<order>.txt.
    # Orders missing from the file simply print the IRAF error and continue.
    print(' Exporting file')
    for order in np.arange(1, 32, 1):
        try:
            iraf.wspectext(input=fits_path + '[*,' + str(order) + ',1]',
                           output=txt_path + '_{:.0f}.txt'.format(order),
                           header='no')
        except Exception as e:
            print(e)
            pass


# Module-level IRAF/PyRAF initialisation for the echelle pipeline.
# NOTE(review): the original file was whitespace-mangled; these statements
# are interpreted as top-level script setup (not part of the function
# above) because they load the packages and create the uparm directory
# once per run — confirm against the original source.
iraf.noao(_doprint=0, Stdout="/dev/null")
iraf.rv(_doprint=0, Stdout="/dev/null")
iraf.imred(_doprint=0, Stdout="/dev/null")
iraf.ccdred(_doprint=0, Stdout="/dev/null")
iraf.images(_doprint=0, Stdout="/dev/null")
iraf.immatch(_doprint=0, Stdout="/dev/null")
iraf.onedspec(_doprint=0, Stdout="/dev/null")
iraf.twodspec(_doprint=0, Stdout="/dev/null")
iraf.apextract(_doprint=0, Stdout="/dev/null")
iraf.imutil(_doprint=0, Stdout="/dev/null")
iraf.echelle(_doprint=0, Stdout="/dev/null")
iraf.astutil(_doprint=0, Stdout="/dev/null")
iraf.apextract.dispaxi = 1
iraf.echelle.dispaxi = 1
# blank instrument translation file: treat all images as plain CCD frames
iraf.ccdred.instrum = 'blank.txt'
os.environ['PYRAF_BETA_STATUS'] = '1'
# keep IRAF task parameters local to this run
os.system('mkdir uparm')
iraf.set(uparm=os.getcwd() + '/uparm')
# ------------------------------------------------------------------------------------------------------------------- # # Read Data Containing Information On FILTERS (Extinction Coefficients For Rv = 3.1, Fitzpatrick(1999)) # ------------------------------------------------------------------------------------------------------------------- # filter_df = pd.read_csv(DIR_CODE + 'FILTERS_UVIT.dat', sep='\s+') filter_df = filter_df.replace('INDEF', np.nan).set_index(['FILTER', 'Name']).astype('float64') filter_df = filter_df.reset_index().set_index('FILTER') # ------------------------------------------------------------------------------------------------------------------- # # ------------------------------------------------------------------------------------------------------------------- # # Load Required IRAF Packages # ------------------------------------------------------------------------------------------------------------------- # iraf.noao(_doprint=0) iraf.imred(_doprint=0) iraf.ccdred(_doprint=0) iraf.digiphot(_doprint=0) iraf.daophot(_doprint=0) iraf.ptools(_doprint=0) iraf.ccdred.instrument = 'ccddb$kpno/camera.dat' # ------------------------------------------------------------------------------------------------------------------- # # ------------------------------------------------------------------------------------------------------------------- # # Functions For Handling Files & Lists # ------------------------------------------------------------------------------------------------------------------- # def remove_file(file_name): """ Removes the file 'file_name' in the constituent directory.
def create_masterflat(flatdir=None, biasdir=None, channel='rc'): ''' Creates a masterflat from both dome flats and sky flats if the number of counts in the given filter is not saturated and not too low (between 1500 and 40000). ''' if (flatdir == None or flatdir==""): flatdir = "." if (biasdir == None or biasdir==""): biasdir = "." os.chdir(flatdir) if (len(glob.glob("Flat_%s*norm.fits"%channel)) == 4): print "Master Flat exists!" return else: print "Starting the Master Flat creation!" bias_slow = "Bias_%s_fast.fits"%channel bias_fast = "Bias_%s_fast.fits"%channel if (not os.path.isfile(bias_slow) and not os.path.isfile(bias_fast) ): create_masterbias(biasdir) lsflat = [] lfflat = [] #Select all filts that are Flats with same instrument for f in glob.glob("*fits"): #try: if fitsutils.has_par(f, "OBJECT"): obj = str.upper(fitsutils.get_par(f, "OBJECT")) else: continue if ( ("DOME" in obj or "FLAT" in obj) and (channel == fitsutils.get_par(f, "CHANNEL"))): if (fitsutils.get_par(f, "ADCSPEED")==2): lfflat.append(f) else: lsflat.append(f) #except: # print "Error with retrieving parameters for file", f # pass print "Files for slow flat", lsflat print "Files for fast flat", lfflat fsfile ="lflat_slow_"+channel np.savetxt(fsfile, np.array(lsflat), fmt="%s") fffile ="lflat_fast_"+channel np.savetxt(fffile, np.array(lfflat), fmt="%s") # Running IRAF iraf.noao(_doprint=0) iraf.imred(_doprint=0) iraf.ccdred(_doprint=0) #Remove bias from the flat if len(lsflat) >0: iraf.imarith("@"+fsfile, "-", bias_slow, "b_@"+fsfile) if len(lfflat) >0: iraf.imarith("@"+fffile, "-", bias_fast, "b_@"+fffile) #Slices the flats. debiased_flats = glob.glob("b_*.fits") for f in debiased_flats: print "Slicing file", f slice_rc(f) #Remove the un-sliced file os.remove(f) #Selects the ones that are suitable given the number of counts and combines them. 
bands = ['u', 'g', 'r', 'i'] for b in bands: out = "Flat_%s_%s.fits"%(channel, b) out_norm = out.replace(".fits","_norm.fits") if (os.path.isfile(out_norm)): print "Master Flat for filter %s exists. Skipping..."%b continue lfiles = [] for f in glob.glob('b_*_%s.fits'%b): d = pf.open(f)[0].data if np.percentile(d, 90)>1500 and np.percentile(d, 90)<40000: lfiles.append(f) if len(lfiles) == 0: print "WARNING!!! Could not find suitable flats for band %s"%b continue ffile ="lflat_"+b np.savetxt(ffile, np.array(lfiles), fmt="%s") #Cleaning of old files if(os.path.isfile(out)): os.remove(out) if(os.path.isfile(out_norm)): os.remove(out_norm) if(os.path.isfile("Flat_stats")): os.remove("Flat_stats") #Combine flats iraf.imcombine(input = "@"+ffile, \ output = out, \ combine = "median",\ scale = "mode", weight = "exposure") iraf.imstat(out, fields="image,npix,mean,stddev,min,max,mode", Stdout="Flat_stats") st = np.genfromtxt("Flat_stats", names=True, dtype=None) #Normalize flats iraf.imarith(out, "/", st["MODE"], out_norm) #Do some cleaning print 'Removing from lfiles' for f in glob.glob('b_*_%s.fits'%b): os.remove(f) os.remove(ffile) if os.path.isfile(fsfile): os.remove(fsfile) if os.path.isfile(fffile): os.remove(fffile)
def create_superflat(imdir, filters=["u", "g", "r", "i"]):
    # Build a per-filter "superflat" from science frames: bright sources
    # are masked out (via sextractor), the masked frames are median
    # combined, and the result is normalised by its modal value.
    # NOTE(review): imdir is currently unused — the frame search is
    # relative to the working directory; confirm against callers.
    # Locate images for each filter
    imlist = glob.glob("rc*fits")
    # Run sextractor to locate bright sources
    sexfiles = sextractor.run_sex(imlist, overwrite=False)
    maskfiles = []
    for i, im in enumerate(imlist):
        # Create a mask and store it in the mask directory; record it in
        # the frame's BPM (bad pixel mask) header keyword
        maskfile = mask_stars(im, sexfiles[i])
        maskfiles.append(maskfile)
        fitsutils.update_par(im, "BPM", os.path.relpath(maskfile))
    for filt in filters:
        fimlist = [
            im for im in imlist if fitsutils.get_par(im, "FILTER") == filt
        ]
        fmasklist = [
            im for im in maskfiles if fitsutils.get_par(im, "FILTER") == filt
        ]
        if len(fimlist) == 0:
            continue
        fsfile = "lflat_%s" % filt
        msfile = "lmask_%s" % filt
        np.savetxt(fsfile, np.array(fimlist), fmt="%s")
        np.savetxt(msfile, np.array(fmasklist), fmt="%s")
        '''masklist = []
        for m in fmasklist:
            hdulist = fits.open(m)
            data = hdulist[0].data
            masklist.append(data)
        masklist = np.array(masklist)
        hdu = fits.PrimaryHDU(masklist)
        hdulist = fits.HDUList([hdu])
        hdulist.writeto("mastermask_%s.fits"%filt)'''
        # Running IRAF
        iraf.noao(_doprint=0)
        iraf.imred(_doprint=0)
        iraf.ccdred(_doprint=0)
        # zero out the masked (source) pixels so imcombine can reject them
        iraf.imarith("@" + fsfile, "*", "@" + msfile, "m_@" + fsfile)
        # Combine flats, treating masked (value 0) pixels as bad
        iraf.imcombine(input="m_@" + fsfile,
                       output="superflat_%s.fits" % filt,
                       combine="median",
                       scale="mode",
                       masktype="badvalue",
                       maskvalue=0)
        iraf.imstat("superflat_%s.fits" % filt,
                    fields="image,npix,mean,stddev,min,max,mode",
                    Stdout="Flat_stats")
        # brief pause so the stats file is flushed before reading it back
        time.sleep(0.1)
        st = np.genfromtxt("Flat_stats", names=True, dtype=None)
        # Normalize flats by the modal count level
        iraf.imarith("superflat_%s.fits" % filt, "/", st["MODE"],
                     "superflat_%s_norm.fits" % filt)
def lickshane1Dredu(files, _interactive, _ext_trace, _dispersionline, _automaticex, _verbose=False): import lickshane import datetime import os import re import string import sys liststandard = '' listatmo0 = '' # os.environ["PYRAF_BETA_STATUS"] = "1" _extinctdir = 'direc$standard/extinction/' _extinction = 'lick.dat' _observatory = 'lick' now = datetime.datetime.now() datenow = now.strftime('20%y%m%d%H%M') MJDtoday = 55927 + (datetime.date.today() - datetime.date(2012, 01, 01)).days scal = np.pi / 180. dv = lickshane.util.dvex() std, rastd, decstd, magstd = lickshane.util.readstandard( 'standard_lick_mab.txt') objectlist = {} for img in files: hdr = lickshane.util.readhdr(img) img = re.sub('\n', '', img) lickshane.util.correctcard(img) _ra = lickshane.util.readkey3(hdr, 'RA') _dec = lickshane.util.readkey3(hdr, 'DEC') _object = lickshane.util.readkey3(hdr, 'object') _grism = lickshane.util.readkey3(hdr, 'grism') _slit = lickshane.util.readkey3(hdr, 'slit') dd = np.arccos( np.sin(_dec * scal) * np.sin(decstd * scal) + np.cos(_dec * scal) * np.cos(decstd * scal) * np.cos( (_ra - rastd) * scal)) * ((180 / np.pi) * 3600) if min(dd) < 100: _type = 'stdsens' else: _type = 'obj' print img, _type if min(dd) < 100: lickshane.util.updateheader(img, 0, {'stdname': [std[np.argmin(dd)], '']}) lickshane.util.updateheader( img, 0, {'magstd': [float(magstd[np.argmin(dd)]), '']}) if _type not in objectlist: objectlist[_type] = {} if (_grism, _slit) not in objectlist[_type]: objectlist[_type][_grism, _slit] = [img] else: objectlist[_type][_grism, _slit].append(img) from pyraf import iraf iraf.set(stdimage='imt2048') iraf.noao(_doprint=0) iraf.imred(_doprint=0) iraf.specred(_doprint=0) iraf.imutil(_doprint=0) toforget = ['imutil.imcopy', 'specred.sarith', 'specred.standard'] for t in toforget: iraf.unlearn(t) iraf.specred.verbose = 'no' iraf.specred.dispaxi = 2 iraf.set(direc=lickshane.__path__[0] + '/') sens = {} print objectlist outputfile = [] if 'obj' in objectlist.keys(): tpe = 
'obj' elif 'stdsens' in objectlist.keys(): tpe = 'stdsens' else: sys.exit('error: no objects and no standards in the list') for setup in objectlist[tpe]: extracted = [] listatmo = [] if setup not in sens: sens[setup] = [] if tpe == 'obj': print '\n### setup= ', setup, '\n### objects= ', objectlist['obj'][ setup], '\n' for img in objectlist['obj'][setup]: # hdr=readhdr(img) print '\n\n### next object= ', img, ' ', lickshane.util.readkey3( lickshane.util.readhdr(img), 'object'), '\n' #_automaticex = '' imgex = lickshane.lickshane1Ddef.extractspectrum( img, dv, _ext_trace, _dispersionline, _interactive, 'obj', automaticex=_automaticex) if not os.path.isfile(imgex): sys.exit('### error, extraction not computed') if not lickshane.util.readkey3(lickshane.util.readhdr(imgex), 'shift') and \ lickshane.util.readkey3(lickshane.util.readhdr(imgex), 'shift') != 0.0: if setup in ['300_7500']: lickshane.lickshane1Ddef.checkwavestd( imgex, _interactive) else: print 'wave check using teluric not possible' extracted.append(imgex) if imgex not in outputfile: outputfile.append(imgex) lickshane.util.updateheader( imgex, 0, {'TRACE1': [img, 'Originating file']}) if os.path.isfile('database/ap' + re.sub('_ex.fits', '', imgex)): if 'database/ap' + re.sub('_ex.fits', '', imgex) not in outputfile: outputfile.append('database/ap' + re.sub('_ex.fits', '', imgex)) print '\n### all object with this setup extracted\n' if liststandard: standardlist = liststandard _type = 'stdfromdreducer' else: try: standardlist = objectlist['stdsens'][setup] _type = 'stdsens' except: standardlist = '' _type = '' if _type == 'stdfromdreducer' and len(extracted) >= 1: _outputsens2 = lichshane.util.searchsens(extracted[0], standardlist)[0] print '\n### using standard from reducer ' + str(_outputsens2) elif _type not in ['stdsens', 'stdfromdreducer' ] and len(extracted) >= 1: _outputsens2 = lickshane.util.searchsens(extracted[0], '')[0] os.system('cp ' + _outputsens2 + ' .') _outputsens2 = 
string.split(_outputsens2, '/')[-1] print '\n### no standard in the list, using standard from archive' else: for simg in standardlist: print '\n### standard for setup ' + \ str(setup) + ' = ', simg, ' ', lickshane.util.readkey3( lickshane.util.readhdr(simg), 'object'), '\n' simgex = lickshane.lickshane1Ddef.extractspectrum( simg, dv, False, False, _interactive, 'std', automaticex=_automaticex) lickshane.util.updateheader( simgex, 0, {'TRACE1': [simg, 'Originating file']}) if not lickshane.util.readkey3( lickshane.util.readhdr(simgex), 'shift') and lickshane.util.readkey3( lickshane.util.readhdr(simgex), 'shift') != 0.0: lickshane.lickshane1Ddef.checkwavestd(simgex, _interactive) print simgex atmofile = lickshane.lickshane1Ddef.telluric_atmo( simgex) # atmo file2 print atmofile lickshane.util.updateheader( atmofile, 0, {'TRACE1': [simgex, 'Originating file']}) if tpe != 'obj' and atmofile not in outputfile: outputfile.append(atmofile) if not listatmo0: listatmo.append(atmofile) sens[setup].append(simgex) if simgex not in outputfile: outputfile.append(simgex) print '\n### standard available: ', sens[setup] if tpe == 'obj': if len(sens[setup]) > 1: goon = 'no' while goon != 'yes': stdused = raw_input( '\n### more than one standard for this setup, which one do you want to use [' + sens[setup][0] + '] ?') if not stdused: stdused = sens[setup][0] if os.path.isfile(stdused): goon = 'yes' else: stdused = sens[setup][0] stdvec = [stdused] else: stdvec = sens[setup] for stdused in stdvec: stdusedclean = re.sub('_ex', '_clean', stdused) lickshane.util.delete(stdusedclean) iraf.specred.sarith(input1=stdused, op='/', input2=atmofile, output=stdusedclean, format='multispec') _outputsens2 = lickshane.lickshane1Ddef.sensfunction( stdusedclean, 'spline3', 16, _interactive) lickshane.util.updateheader( _outputsens2, 0, {'TRACE1': [stdused, 'Originating file']}) if _outputsens2 not in outputfile: outputfile.append(_outputsens2) if _outputsens2 and tpe == 'obj': 
#################################################### for img in objectlist['obj'][setup]: # flux calibrate 2d images imgd = fluxcalib2d(img, _outputsens2) lickshane.util.updateheader( imgd, 0, {'TRACE1': [img, 'Originating files']}) if imgd not in outputfile: outputfile.append(imgd) #################################################### # flux calib in the standard way if not listatmo and listatmo0: listatmo = listatmo0[:] for _imgex in extracted: _airmass = lickshane.util.readkey3( lickshane.util.readhdr(_imgex), 'airmass') _exptime = lickshane.util.readkey3( lickshane.util.readhdr(_imgex), 'exptime') _imgf = re.sub('_ex.fits', '_f.fits', _imgex) lickshane.util.delete(_imgf) qqq = iraf.specred.calibrate(input=_imgex, output=_imgf, sensiti=_outputsens2, extinct='yes', flux='yes', extinction=_extinctdir + _extinction, observatory=_observatory, airmass=_airmass, ignorea='yes', exptime=_exptime, fnu='no') hedvec = { 'SENSFUN': [_outputsens2, ''], # 'SNR': [lickshane.util.StoN2(_imgf, False), 'Average signal to noise ratio per pixel'], 'BUNIT': ['erg/cm2/s/Angstrom', 'Physical unit of array values'], 'TRACE1': [_imgex, 'Originating file'], 'ASSON1': [ re.sub('_f.fits', '_2df.fits', _imgf), 'Name of associated file' ], 'ASSOC1': ['ANCILLARY.2DSPECTRUM', 'Category of associated file'] } lickshane.util.updateheader(_imgf, 0, hedvec) if _imgf not in outputfile: outputfile.append(_imgf) if listatmo: atmofile = lickshane.util.searcharc(_imgex, listatmo)[0] if atmofile: _imge = re.sub('_f.fits', '_e.fits', _imgf) lickshane.util.delete(_imge) iraf.specred.sarith(input1=_imgf, op='/', input2=atmofile, output=_imge, w1='INDEF', w2='INDEF', format='multispec') try: iraf.imutil.imcopy(input=_imgf + '[*,1,2]', output=_imge + '[*,1,2]', verbose='no') except: pass try: iraf.imutil.imcopy(input=_imgf + '[*,1,3]', output=_imge + '[*,1,3]', verbose='no') except: pass try: iraf.imutil.imcopy(input=_imgf + '[*,1,4]', output=_imge + '[*,1,4]', verbose='no') except: pass if _imge not in 
outputfile: outputfile.append(_imge) if atmofile not in outputfile: outputfile.append(atmofile) lickshane.util.updateheader( _imge, 0, {'ATMOFILE': [atmofile, '']}) lickshane.util.updateheader( _imge, 0, {'TRACE1': [_imgf, 'Originating file']}) imgin = _imge else: imgin = _imgf else: imgin = _imgf imgasci = re.sub('.fits', '.asci', imgin) lickshane.util.delete(imgasci) iraf.onedspec(_doprint=0) iraf.onedspec.wspectext(imgin + '[*,1,1]', imgasci, header='no') if imgasci not in outputfile: outputfile.append(imgasci) return objectlist, 'ddd'
def reduce_image(image, flatdir=None, biasdir=None, cosmic=False, astrometry=True, channel='rc', target_dir='reduced', overwrite=False): ''' Applies Flat field and bias calibrations to the image. Steps: 1. - Solve astrometry on the entire image. 2. - Computes cosmic ray rejectionon the entire image. 3. - Compute master bias (if it does not exist) and de-bias the image. 4. - Separate the image into 4 filters. 5. - Compute flat field for each filter (if it does not exist) and apply flat fielding on the image. 6. - Compute the image zeropoint. ''' logger.info("Reducing image %s"% image) print "Reducing image ", image image = os.path.abspath(image) imname = os.path.basename(image).replace(".fits", "") try: objectname = fitsutils.get_par(image, "NAME").replace(" ","")+"_"+fitsutils.get_par(image, "FILTER") except: logger.error( "ERROR, image "+ image + " does not have a NAME or a FILTER!!!") return print "For object", objectname logger.info( "For object %s"% objectname) #Change to image directory mydir = os.path.dirname(image) if mydir=="": mydir = "." mydir = os.path.abspath(mydir) os.chdir(mydir) #Create destination directory if (not os.path.isdir(target_dir)): os.makedirs(target_dir) #If we don't want to overwrite the already extracted images, we check wether they exist. if (not overwrite): existing = True for band in ['u', 'g', 'r', 'i']: destfile = os.path.join(target_dir, imname + "_f_b_a_%s_%s_0.fits"%(objectname, band)) logger.info( "Looking if file %s exists: %s"%( destfile, \ (os.path.isfile(destfile) ) ) ) existing = existing and (os.path.isfile( destfile ) ) if existing: return [] #Initialize the basic parameters. 
init_header_reduced(image) astro = "" if (astrometry): logger.info( "Solving astometry for the whole image...") img = solve_astrometry(image) if (os.path.isfile(img)): astro="a_" fitsutils.update_par(img, "IQWCS", 1) else: logger.error( "ASTROMETRY DID NOT SOLVE ON IMAGE %s"% image) img = image #Update noise parameters needed for cosmic reection if (fitsutils.get_par(img, "ADCSPEED")==2): fitsutils.update_par(img, "RDNOISE", 20.) else: fitsutils.update_par(img, "RDNOISE", 4.) if (cosmic): logger.info( "Correcting for cosmic rays...") # Correct for cosmics each filter cleanimg = clean_cosmic(os.path.join(os.path.abspath(mydir), img)) img = cleanimg #Get basic statistics for the image nsrc, fwhm, ellip, bkg = sextractor.get_image_pars(img) logger.info( "Sextractor statistics: nscr %d, fwhm (pixel) %.2f, ellipticity %.2f"% (nsrc, fwhm, ellip)) print "Sextractor statistics: nscr %d, fwhm (pixel) %.2f, ellipticity %.2f"% (nsrc, fwhm, ellip) dic = {"SEEPIX": fwhm/0.394, "NSRC":nsrc, "ELLIP":ellip} #Update the seeing information from sextractor fitsutils.update_pars(img, dic) #Compute BIAS if (biasdir is None or biasdir==""): biasdir = "." create_masterbias(biasdir) bias_slow = os.path.join(biasdir, "Bias_%s_%s.fits"%(channel, 'slow')) bias_fast = os.path.join(biasdir, "Bias_%s_%s.fits"%(channel, 'fast')) # Running IRAF to DE-BIAS iraf.noao(_doprint=0) iraf.imred(_doprint=0) iraf.ccdred(_doprint=0) #Compute flat field if (flatdir is None or flatdir==""): flatdir = "." create_masterflat(flatdir, biasdir) #New names for the object. debiased = os.path.join(os.path.dirname(img), "b_" + os.path.basename(img)) logger.info( "Creating debiased file, %s"%debiased) if ( (fitsutils.get_par(img, "ADCSPEED")==0.1 and not os.path.isfile(bias_slow)) \ or (fitsutils.get_par(img, "ADCSPEED")==2 and not os.path.isfile(bias_fast)) ): logger.warn( "Master bias not found! 
Tryting to copy from reference folder...") copy_ref_calib(mydir, "Bias") if ( (fitsutils.get_par(img, "ADCSPEED")==0.1 and not os.path.isfile(bias_slow)) \ or (fitsutils.get_par(img, "ADCSPEED")==2 and not os.path.isfile(bias_fast)) ): logger.error( "Bias not found in reference folder") return #Clean first if (os.path.isfile(debiased)): os.remove(debiased) #Debias if (fitsutils.get_par(img, "ADCSPEED")==2): iraf.imarith(img, "-", bias_fast, debiased) fitsutils.update_par(debiased, "BIASFILE", bias_fast) fitsutils.update_par(debiased, "RDNOISE", 20.) else: iraf.imarith(img, "-", bias_slow, debiased) fitsutils.update_par(debiased, "BIASFILE", bias_slow) fitsutils.update_par(debiased, "RDNOISE", 4.) #Set negative counts to zero hdu = fits.open(debiased) header = hdu[0].header hdu[0].data[hdu[0].data<0] = 0 hdu.writeto(debiased, clobber=True) #Slicing the image for flats slice_names = slice_rc(debiased) print "Creating sliced files, ", slice_names #Remove un-sliced image os.remove(debiased) # DE-flat each filter and store under object name for i, debiased_f in enumerate(slice_names): b = fitsutils.get_par(debiased_f, 'filter') deflatted = os.path.join(os.path.dirname(image), target_dir, imname + "_f_b_" + astro + objectname + "_%s.fits"%b) #Flat to be used for that filter flat = os.path.join(flatdir, "Flat_%s_%s_norm.fits"%(channel, b)) if (not os.path.isfile(flat)): logger.warn( "Master flat not found in %s"% flat) copy_ref_calib(mydir, "Flat_%s_%s_norm"%(channel, b)) continue else: logger.info( "Using flat %s"%flat) #Cleans the deflatted file if exists if (os.path.isfile(deflatted)): os.remove(deflatted) if (os.path.isfile(debiased_f) and os.path.isfile(flat)): logger.info( "Storing de-flatted %s as %s"%(debiased_f, deflatted)) time.sleep(1) iraf.imarith(debiased_f, "/", flat, deflatted) else: logger.error( "SOMETHING IS WRONG. 
Error when dividing %s by the flat field %s!"%(debiased_f, flat)) #Removes the de-biased file os.remove(debiased_f) logger.info( "Updating header with original filename and flat field used.") fitsutils.update_par(deflatted, "ORIGFILE", os.path.basename(image)) fitsutils.update_par(deflatted, "FLATFILE", flat) slice_names[i] = deflatted #Moving files to the target directory for image in slice_names: bkg = get_median_bkg(image) fitsutils.update_par(image, "SKYBKG", bkg) #shutil.move(name, newname) #Compute the zeropoints for image in slice_names: zeropoint.calibrate_zeropoint(image) return slice_names
def sofispec1Dredu(files, _interactive, _ext_trace, _dispersionline, _automaticex, _verbose=False):
    """Reduce SOFI 1D spectra.

    Classifies the input 2D frames (object / solar-analogue 'sun' /
    Vega-type 'vega' / photometric 'stdp' standards) by matching their
    RA/DEC against the standard-star catalogues, merges dithered frames of
    the same observing block with IRAF imcombine, extracts 1D spectra,
    applies telluric and (when a photometric standard is present) flux
    calibration, and writes ESO phase-3 keywords.

    Parameters
    ----------
    files : list of str
        Wavelength-calibrated 2D frames (trailing newlines are stripped).
    _interactive : bool
        Ask the user to confirm merges / aperture positions interactively.
    _ext_trace, _dispersionline :
        Forwarded to ntt.util.extractspectrum (see that helper for exact
        semantics — not documented here).
    _automaticex : bool
        Forwarded as ``automaticex`` to the extraction helper.
    _verbose : bool
        Print per-frame classification details.

    Returns
    -------
    (outputfile, logname) : (list of str, str)
        All files produced, and the name of the .raw.list log file written.
    """
    # print "LOGX:: Entering `sofispec1Dredu` method/function in %(__file__)s"
    # % globals()
    import re
    import string
    import sys
    import os
    os.environ["PYRAF_BETA_STATUS"] = "1"
    import ntt
    try:
        import pyfits
    except:
        from astropy.io import fits as pyfits
    import numpy as np
    import datetime
    import pylab as pl
    from pyraf import iraf

    dv = ntt.dvex()
    now = datetime.datetime.now()
    datenow = now.strftime('20%y%m%d%H%M')
    # 55927 is the MJD of 2012-01-01; add the elapsed days to get today's MJD.
    MJDtoday = 55927 + (datetime.date.today() - datetime.date(2012, 01, 01)).days
    scal = np.pi / 180.  # degrees -> radians
    hdr0 = ntt.util.readhdr(re.sub('\n', '', files[0]))
    _gain = ntt.util.readkey3(hdr0, 'gain')
    _rdnoise = ntt.util.readkey3(hdr0, 'ron')
    # Catalogues (name, RA, DEC, mag) of the three families of standards.
    std_sun, rastd_sun, decstd_sun, magstd_sun = ntt.util.readstandard(
        'standard_sofi_sun.txt')
    std_vega, rastd_vega, decstd_vega, magstd_vega = ntt.util.readstandard(
        'standard_sofi_vega.txt')
    std_phot, rastd_phot, decstd_phot, magstd_phot = ntt.util.readstandard(
        'standard_sofi_phot.txt')
    outputfile = []
    objectlist, RA, DEC = {}, {}, {}  # RA/DEC are never filled (kept as-is)
    for img in files:
        img = re.sub('\n', '', img)
        hdr = ntt.util.readhdr(img)
        _ra = ntt.util.readkey3(hdr, 'RA')
        _dec = ntt.util.readkey3(hdr, 'DEC')
        _grism = ntt.util.readkey3(hdr, 'grism')
        _filter = ntt.util.readkey3(hdr, 'filter')
        _slit = ntt.util.readkey3(hdr, 'slit')
        # Angular distance (arcsec) from this frame to every catalogued
        # standard, via the spherical law of cosines.
        cc_sun = np.arccos(np.sin(_dec * scal) * np.sin(decstd_sun * scal) +
                           np.cos(_dec * scal) * np.cos(decstd_sun * scal) *
                           np.cos((_ra - rastd_sun) * scal)) * ((180 / np.pi) * 3600)
        cc_vega = np.arccos(np.sin(_dec * scal) * np.sin(decstd_vega * scal) +
                            np.cos(_dec * scal) * np.cos(decstd_vega * scal) *
                            np.cos((_ra - rastd_vega) * scal)) * ((180 / np.pi) * 3600)
        cc_phot = np.arccos(np.sin(_dec * scal) * np.sin(decstd_phot * scal) +
                            np.cos(_dec * scal) * np.cos(decstd_phot * scal) *
                            np.cos((_ra - rastd_phot) * scal)) * ((180 / np.pi) * 3600)
        # Within 100 arcsec of a catalogued position -> that kind of standard.
        # NOTE(review): the classification below prefers 'sun' over 'stdp',
        # while the header-update branch below prefers 'phot' — kept as-is.
        if min(cc_sun) < 100:
            _type = 'sun'
        elif min(cc_phot) < 100:
            _type = 'stdp'
        elif min(cc_vega) < 100:
            _type = 'vega'
        else:
            _type = 'obj'
        if min(cc_phot) < 100:
            if _verbose:
                print img, 'phot', str(min(cc_phot)), str(std_phot[np.argmin(cc_phot)])
            ntt.util.updateheader(img, 0, {'stdname': [std_phot[np.argmin(cc_phot)], ''],
                                           'magstd': [float(magstd_phot[np.argmin(cc_phot)]), '']})
            # ntt.util.updateheader(img,0,{'magstd':[float(magstd_phot[argmin(cc_phot)]),'']})
        elif min(cc_sun) < 100:
            if _verbose:
                print img, 'sun', str(min(cc_sun)), str(std_sun[np.argmin(cc_sun)])
            ntt.util.updateheader(img, 0, {'stdname': [std_sun[np.argmin(cc_sun)], ''],
                                           'magstd': [float(magstd_sun[np.argmin(cc_sun)]), '']})
            # ntt.util.updateheader(img,0,{'magstd':[float(magstd_sun[argmin(cc_sun)]),'']})
        elif min(cc_vega) < 100:
            if _verbose:
                print img, 'vega', str(min(cc_vega)), str(std_vega[np.argmin(cc_vega)])
            ntt.util.updateheader(img, 0, {'stdname': [std_vega[np.argmin(cc_vega)], ''],
                                           'magstd': [float(magstd_vega[np.argmin(cc_vega)]), '']})
            # ntt.util.updateheader(img,0,{'magstd':[float(magstd_vega[argmin(cc_vega)]),'']})
        else:
            if _verbose:
                print img, 'object'
        _OBID = (ntt.util.readkey3(hdr, 'esoid'))
        # Group frames: type -> grism -> observing-block id -> [frames].
        if _type not in objectlist:
            objectlist[_type] = {}
        if _grism not in objectlist[_type]:
            objectlist[_type][_grism] = {}
        if _OBID not in objectlist[_type][_grism]:
            objectlist[_type][_grism][_OBID] = []
        objectlist[_type][_grism][_OBID].append(img)

    # Report which families of standards were found in the input list.
    if 'stdp' not in objectlist:
        print '### warning: not photometric standard'
    else:
        print '### photometric standard in the list of object'
    if 'sun' not in objectlist:
        print '### warning: not telluric G standard (sun type)'
    else:
        print '### telluric G standard (sun type) in the list of object'
    if 'vega' not in objectlist:
        print '### warning: not telluric A standard (vega type)'
    else:
        print '### telluric A standard (vega type) in the list of object'

    # Load the IRAF packages used below and reset the relevant tasks.
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.specred(_doprint=0)
    iraf.immatch(_doprint=0)
    iraf.imutil(_doprint=0)
    toforget = ['specred.apall', 'specred.transform']
    for t in toforget:
        iraf.unlearn(t)
    iraf.specred.apall.readnoi = _rdnoise
    iraf.specred.apall.gain = _gain
    iraf.specred.dispaxi = 2

    # ------------------------------------------------------------------
    # Merge the dithered frames of each observing block into one frame.
    # ------------------------------------------------------------------
    for _type in objectlist:
        for setup in objectlist[_type]:
            for _ID in objectlist[_type][setup]:
                listmerge = objectlist[_type][setup][_ID]
                listmerge = ntt.sortbyJD(listmerge)
                _object = ntt.util.readkey3(
                    ntt.util.readhdr(listmerge[0]), 'object')
                # Sanitize the object name: keep only the part before any
                # '/', ' ' or '.' so it can be used in a file name.
                if string.count(_object, '/') or string.count(_object, '.') or string.count(_object, ' '):
                    nameobj = string.split(_object, '/')[0]
                    nameobj = string.split(nameobj, ' ')[0]
                    nameobj = string.split(nameobj, '.')[0]
                else:
                    nameobj = _object
                _date = ntt.util.readkey3(
                    ntt.util.readhdr(listmerge[0]), 'date-night')
                outputimage = nameobj + '_' + _date + \
                    '_' + setup + '_merge_' + str(MJDtoday)
                outputimage = ntt.util.name_duplicate(
                    listmerge[0], outputimage, '')
                print '### setup= ', setup, ' name field= ', nameobj, ' merge image= ', outputimage, '\n'
                #################
                # added to avoid crashing with a single frame
                # header will not be updated with all info
                #################
                if len(listmerge) == 1:
                    # Single frame: just copy it, skip the merge ('n' below).
                    ntt.util.delete(outputimage)
                    iraf.imutil.imcopy(
                        listmerge[0], output=outputimage, verbose='no')
                    answ = 'n'
                else:
                    if os.path.isfile(outputimage) and _interactive:
                        answ = raw_input(
                            'combine frame of dithered spectra already created. Do you want to make it again [[y]/n] ? ')
                        if not answ:
                            answ = 'y'
                    else:
                        answ = 'y'
                #################
                if answ in ['Yes', 'y', 'Y', 'yes']:
                    if _interactive:
                        automaticmerge = raw_input(
                            '\n### Do you want to try to find the dither bethween frames automatically [[y]/n]')
                        if not automaticmerge:
                            automaticmerge = 'yes'
                        elif automaticmerge.lower() in ['y', 'yes']:
                            automaticmerge = 'yes'
                        else:
                            automaticmerge = 'no'
                    else:
                        automaticmerge = 'yes'
                    if automaticmerge == 'yes':
                        # Automatic attempt: compare the aperture position
                        # found on each frame with the cumulative dither
                        # offset from the header ('xcum'); if any frame
                        # disagrees by >= 20 pixels, fall back to the
                        # interactive procedure.
                        offset = 0
                        offsetvec = []
                        _center0 = ntt.sofispec1Ddef.findaperture(
                            listmerge[0], False)
                        _offset0 = ntt.util.readkey3(
                            ntt.util.readhdr(listmerge[0]), 'xcum')
                        print '\n### Try to merge spectra considering their offset along x axes .......'
                        f = open('_offset', 'w')
                        for img in listmerge:
                            _center = ntt.sofispec1Ddef.findaperture(
                                img, False)
                            _center2 = (
                                float(_center) + (float(_offset0) - float(_center0))) * (-1)
                            _offset = (-1) * \
                                ntt.util.readkey3(
                                    ntt.util.readhdr(img), 'xcum')
                            if abs(_center2 - _offset) >= 20:
                                automaticmerge = 'no'
                                break
                            else:
                                offset3 = _center2
                                offsetvec.append(offset3)
                                line = str(offset3) + ' 0\n'
                                f.write(line)
                        f.close()
                    if automaticmerge == 'yes':
                        print '### automatic merge .......... done'
                    else:
                        # Interactive fallback: the user marks the aperture
                        # on every frame; offsets are written to '_offset'.
                        print '\n### warning: try identification of spectra position in interactive way '
                        offset = 0
                        offsetvec = []
                        _z1, _z2, goon = ntt.util.display_image(
                            listmerge[0], 1, '', '', False)
                        print '\n### find aperture on first frame and use it as reference position of ' \
                              'the spectra (mark with ' + '"' + 'm' + '"' + ')'
                        _center0 = ntt.sofispec1Ddef.findaperture(
                            listmerge[0], True)
                        _offset0 = ntt.util.readkey3(
                            ntt.util.readhdr(listmerge[0]), 'xcum')
                        print '\n### find the aperture on all the spectra frames (mark with ' + '"' + 'm' + '"' + ')'
                        f = open('_offset', 'w')
                        for img in listmerge:
                            print '\n### ', img
                            _z1, _z2, goon = ntt.util.display_image(
                                img, 1, '', '', False)
                            _center = ntt.sofispec1Ddef.findaperture(img, True)
                            _center2 = (
                                float(_center) + (float(_offset0) - float(_center0))) * (-1)
                            _offset = (-1) * \
                                ntt.util.readkey3(
                                    ntt.util.readhdr(img), 'xcum')
                            print '\n### position from dither header: ' + str(_offset)
                            print '### position identified interactively: ' + str(_center2)
                            offset3 = raw_input(
                                '\n### which is the right position [' + str(_center2) + '] ?')
                            if not offset3:
                                offset3 = _center2
                            offsetvec.append(offset3)
                            line = str(offset3) + ' 0\n'
                            f.write(line)
                        f.close()
                    print offsetvec
                    # First useful column after shifting by the offsets.
                    start = int(max(offsetvec) - min(offsetvec))
                    print start
                    f = open('_goodlist', 'w')
                    print listmerge
                    for img in listmerge:
                        f.write(img + '\n')
                    f.close()
                    ntt.util.delete(outputimage)
                    ntt.util.delete('_output.fits')
                    # NOTE(review): yy1 is read but never used afterwards.
                    yy1 = pyfits.open(listmerge[0])[0].data[:, 10]
                    iraf.immatch.imcombine('@_goodlist', '_output', combine='sum', reject='none',
                                           offset='_offset', masktyp='', rdnoise=_rdnoise,
                                           gain=_gain, zero='mode', Stdout=1)
                    _head = pyfits.open('_output.fits')[0].header
                    # Crop the combined frame to at most 1024 columns.
                    if _head['NAXIS1'] < 1024:
                        stop = str(_head['NAXIS1'])
                    else:
                        stop = '1024'
                    iraf.imutil.imcopy(
                        '_output[' + str(start) + ':' + stop + ',*]', output=outputimage, verbose='no')
                    print outputimage
                    print len(listmerge)
                    hdr1 = ntt.util.readhdr(outputimage)
                    # Phase-3 exposure bookkeeping for the merged frame.
                    ntt.util.updateheader(outputimage, 0,
                                          {'SINGLEXP': [False, 'TRUE if resulting from single exposure'],
                                           'M_EPOCH': [False, 'TRUE if resulting from multiple epochs'],
                                           'EXPTIME': [ntt.util.readkey3(hdr1, 'EXPTIME') * len(listmerge),
                                                       'Total integration time per pixel (s)'],
                                           'TEXPTIME': [float(ntt.util.readkey3(hdr1, 'TEXPTIME')) * len(listmerge),
                                                        'Total integration time of all exposures (s)'],
                                           'APERTURE': [2.778e-4 * float(re.sub('long_slit_', '',
                                                                                ntt.util.readkey3(hdr1, 'slit'))),
                                                        '[deg] Aperture diameter'],
                                           'NOFFSETS': [2, 'Number of offset positions'],
                                           'NUSTEP': [0, 'Number of microstep positions'],
                                           'NJITTER': [int(ntt.util.readkey3(hdr1, 'NCOMBINE') / 2),
                                                       'Number of jitter positions']})
                    hdr = ntt.util.readhdr(outputimage)
                    # Drop the IMCMB* bookkeeping keywords added by imcombine.
                    matching = [s for s in hdr.keys() if "IMCMB" in s]
                    for imcmb in matching:
                        aaa = iraf.hedit(outputimage, imcmb, delete='yes',
                                         update='yes', verify='no', Stdout=1)
                    if 'SKYSUB' in hdr.keys():
                        aaa = iraf.hedit(outputimage, 'SKYSUB', delete='yes',
                                         update='yes', verify='no', Stdout=1)
                    mjdend = []
                    mjdstart = []
                    num = 0
                    # Record provenance (PROVn/TRACEn) and collect MJD ranges.
                    for img in listmerge:
                        num = num + 1
                        hdrm = ntt.util.readhdr(img)
                        ntt.util.updateheader(outputimage, 0,
                                              {'PROV' + str(num): [ntt.util.readkey3(hdrm, 'ARCFILE'),
                                                                   'Originating file'],
                                               'TRACE' + str(num): [img, 'Originating file']})
                        mjdend.append(ntt.util.readkey3(hdrm, 'MJD-END'))
                        mjdstart.append(ntt.util.readkey3(hdrm, 'MJD-OBS'))
                    _dateobs = ntt.util.readkey3(ntt.util.readhdr(
                        listmerge[np.argmin(mjdstart)]), 'DATE-OBS')
                    # NOTE(review): the value is converted to seconds here,
                    # but the TELAPSE comment below says [days] — confirm.
                    _telapse = (max(mjdend) - min(mjdstart)) * \
                        60. * 60 * 24.  # *86400
                    _tmid = (max(mjdend) + min(mjdstart)) / 2
                    _title = str(_tmid)[0:9] + ' ' + str(ntt.util.readkey3(hdr, 'object')) + ' ' + str(
                        ntt.util.readkey3(hdr, 'grism')) + ' ' + \
                        str(ntt.util.readkey3(hdr, 'filter')) + \
                        ' ' + str(ntt.util.readkey3(hdr, 'slit'))
                    ntt.util.updateheader(outputimage, 0,
                                          {'MJD-OBS': [min(mjdstart), 'MJD start'],
                                           'MJD-END': [max(mjdend), 'MJD end'],
                                           'TELAPSE': [_telapse, 'Total elapsed time [days]'],
                                           'TMID': [_tmid, '[d] MJD mid exposure'],
                                           'TITLE': [_title, 'Dataset title'],
                                           'DATE-OBS': [_dateobs, 'Date of observation']})
                    # missing: merge airmass
                else:
                    print '\n### skip making again combined spectrum'
                # From here on the observing block is represented only by
                # its merged frame.
                objectlist[_type][setup][_ID] = [outputimage]
                print '\n### setup= ', setup, ' name field= ', nameobj, ' merge image= ', outputimage, '\n'
                if outputimage not in outputfile:
                    outputfile.append(outputimage)
                ntt.util.updateheader(outputimage, 0, {'FILETYPE': [
                    42116, 'combine 2D spectra frame']})
    if _verbose:
        if 'obj' in objectlist:
            print objectlist['obj']
        if 'stdp' in objectlist:
            print objectlist['stdp']
        if 'sun' in objectlist:
            print objectlist['sun']
        if 'vega' in objectlist:
            print objectlist['vega']
    if 'obj' not in objectlist.keys():
        sys.exit('\n### error: no objects in the list')
    sens = {}
    print '\n############################################\n### extract the spectra '
    # print objectlist
    # ------------------------------------------------------------------
    # Extraction + telluric correction + (optional) flux calibration.
    # ------------------------------------------------------------------
    for setup in objectlist['obj']:
        reduced = []  # NOTE(review): never appended to — kept as-is
        for _ID in objectlist['obj'][setup]:
            for img in objectlist['obj'][setup][_ID]:
                hdr = ntt.util.readhdr(img)
                print '\n### next object\n ', img, ntt.util.readkey3(hdr, 'object')
                _grism = ntt.util.readkey3(hdr, 'grism')
                _exptimeimg = ntt.util.readkey3(hdr, 'exptime')
                _JDimg = ntt.util.readkey3(hdr, 'JD')
                imgex = ntt.util.extractspectrum(img, dv, _ext_trace, _dispersionline,
                                                 _interactive, 'obj', automaticex=_automaticex)
                if imgex not in outputfile:
                    outputfile.append(imgex)
                ntt.util.updateheader(imgex, 0,
                                      {'FILETYPE': [42107, 'extracted 1D wave calib'],
                                       'PRODCATG': ['SCIENCE.' + ntt.util.readkey3(hdr, 'tech').upper(),
                                                    'Data product category']})
                hdr = ntt.util.readhdr(imgex)
                # Remove stale TRACE* keys, then set TRACE1 to the true parent.
                matching = [s for s in hdr.keys() if "TRACE" in s]
                for imcmb in matching:
                    aaa = iraf.hedit(imgex, imcmb, delete='yes',
                                     update='yes', verify='no', Stdout=1)
                ntt.util.updateheader(
                    imgex, 0, {'TRACE1': [img, 'Originating file']})
                # Keep the IRAF aperture database file with the products.
                if os.path.isfile('database/ap' + re.sub('_ex.fits', '', imgex)):
                    if 'database/ap' + re.sub('_ex.fits', '', imgex) not in outputfile:
                        outputfile.append(
                            'database/ap' + re.sub('_ex.fits', '', imgex))
                ########################### telluric standard #############
                # Prefer a solar-analogue standard, fall back to Vega type.
                if 'sun' in objectlist and setup in objectlist['sun']:
                    _type = 'sun'
                elif 'vega' in objectlist and setup in objectlist['vega']:
                    _type = 'vega'
                else:
                    _type = 'none'
                if _type in ['sun', 'vega']:
                    stdref = ntt.__path__[
                        0] + '/standard/fits/' + str(_type) + '.fits'
                    # Choose the standard observed closest in time (JD).
                    stdvec, airmassvec, JDvec = [], [], []
                    for _ID in objectlist[_type][setup]:
                        for std in objectlist[_type][setup][_ID]:
                            _airmassstd = ntt.util.readkey3(
                                ntt.util.readhdr(std), 'airmass')
                            _JDstd = ntt.util.readkey3(
                                ntt.util.readhdr(std), 'JD')
                            JDvec.append(abs(_JDstd - _JDimg))
                            stdvec.append(std)
                            airmassvec.append(_airmassstd)
                    stdtelluric = stdvec[np.argmin(JDvec)]
                    _exptimestd = ntt.util.readkey3(
                        ntt.util.readhdr(stdtelluric), 'exptime')
                    _magstd = ntt.util.readkey3(
                        ntt.util.readhdr(stdtelluric), 'magstd')
                    print '\n\n ##### closer standard for telluric corrections #### \n\n'
                    print stdtelluric, airmassvec[np.argmin(JDvec)]
                    stdtelluric_ex = ntt.util.extractspectrum(stdtelluric, dv, False, False,
                                                              _interactive, 'std', automaticex=_automaticex)
                    if stdtelluric_ex not in outputfile:
                        outputfile.append(stdtelluric_ex)
                    ntt.util.updateheader(stdtelluric_ex, 0, {'FILETYPE': [
                        42107, 'extracted 1D wave calib ']})
                    ntt.util.updateheader(stdtelluric_ex, 0, {'PRODCATG': [
                        'SCIENCE.' + ntt.util.readkey3(
                            ntt.util.readhdr(stdtelluric_ex), 'tech').upper(),
                        'Data product category']})
                    hdr = ntt.util.readhdr(stdtelluric_ex)
                    matching = [s for s in hdr.keys() if "TRACE" in s]
                    for imcmb in matching:
                        aaa = iraf.hedit(
                            stdtelluric_ex, imcmb, delete='yes', update='yes', verify='no', Stdout=1)
                    ntt.util.updateheader(stdtelluric_ex, 0, {'TRACE1': [
                        stdtelluric, 'Originating file']})
                    ###########################################################
                    # SN tellurich calibration
                    imgf = re.sub('_ex.fits', '_f.fits', imgex)
                    imgf, senstelluric = ntt.sofispec1Ddef.calibrationsofi(
                        imgex, stdtelluric_ex, stdref, imgf, _interactive)
                    if imgf not in outputfile:
                        outputfile.append(imgf)
                    if senstelluric not in outputfile:
                        outputfile.append(senstelluric)
                    ntt.util.updateheader(imgf, 0,
                                          {'FILETYPE': [42208, '1D wave calib, tell cor.'],
                                           # 'SNR': [ntt.util.StoN(imgf, 50),
                                           'SNR': [ntt.util.StoN2(imgf, False),
                                                   'Average signal to noise ratio per pixel'],
                                           'TRACE1': [imgex, 'Originating file'],
                                           'ASSON1': [re.sub('_f.fits', '_2df.fits', imgf),
                                                      'Name of associated file'],
                                           'ASSOC1': ['ANCILLARY.2DSPECTRUM',
                                                      'Category of associated file']})
                    ###########################################################
                    imgd = ntt.efoscspec1Ddef.fluxcalib2d(
                        img, senstelluric)  # flux calibration 2d images
                    ntt.util.updateheader(
                        imgd, 0, {'FILETYPE': [42209, '2D wavelength and flux calibrated spectrum']})
                    iraf.hedit(imgd, 'PRODCATG', delete='yes',
                               update='yes', verify='no')
                    hdrd = ntt.util.readhdr(imgd)
                    matching = [s for s in hdrd.keys() if "TRACE" in s]
                    for imcmb in matching:
                        aaa = iraf.hedit(
                            imgd, imcmb, delete='yes', update='yes', verify='no', Stdout=1)
                    ntt.util.updateheader(
                        imgd, 0, {'TRACE1': [img, 'Originating file']})
                    if imgd not in outputfile:
                        outputfile.append(imgd)
                ###############################################################
                if 'stdp' in objectlist and setup in objectlist['stdp']:
                    print '\n ##### photometric calibration ######\n '
                    standardfile = []
                    for _ID in objectlist['stdp'][setup]:
                        for stdp in objectlist['stdp'][setup][_ID]:
                            stdp_ex = ntt.util.extractspectrum(stdp, dv, False, _dispersionline,
                                                               _interactive, 'std', automaticex=_automaticex)
                            standardfile.append(stdp_ex)
                            if stdp_ex not in outputfile:
                                outputfile.append(stdp_ex)
                            ntt.util.updateheader(stdp_ex, 0, {
                                'FILETYPE': [42107, 'extracted 1D wave calib'],
                                'TRACE1': [stdp_ex, 'Originating file'],
                                'PRODCATG': ['SCIENCE.' +
                                             ntt.util.readkey3(ntt.util.readhdr(stdp_ex), 'tech').upper(),
                                             'Data product category']})
                    print '\n### ', standardfile, ' \n'
                    # Let the user pick one standard when several are present.
                    if len(standardfile) >= 2:
                        standardfile0 = raw_input(
                            'which one do you want to use [' + str(standardfile[0]) + '] ? ')
                        if not standardfile0:
                            standardfile0 = standardfile[0]
                    else:
                        standardfile0 = standardfile[0]
                    print standardfile0
                    stdpf = re.sub('_ex.fits', '_f.fits', standardfile0)
                    stdpf, senstelluric2 = ntt.sofispec1Ddef.calibrationsofi(
                        standardfile0, stdtelluric_ex, stdref, stdpf, _interactive)
                    if stdpf not in outputfile:
                        outputfile.append(stdpf)
                    ntt.util.updateheader(stdpf, 0,
                                          {'FILETYPE': [42208, '1D wave calib, tell cor'],
                                           'TRACE1': [stdp, 'Originating file']})
                    stdname = ntt.util.readkey3(
                        ntt.util.readhdr(standardfile0), 'stdname')
                    standardfile = ntt.__path__[
                        0] + '/standard/flux/' + stdname
                    xx, yy = ntt.util.ReadAscii2(standardfile)
                    # Build the wavelength axis from the WCS keywords.
                    crval1 = pyfits.open(stdpf)[0].header.get('CRVAL1')
                    cd1 = pyfits.open(stdpf)[0].header.get('CD1_1')
                    datastdpf, hdrstdpf = pyfits.getdata(stdpf, 0, header=True)
                    xx1 = np.arange(len(datastdpf[0][0]))
                    aa1 = crval1 + (xx1) * cd1
                    yystd = np.interp(aa1, xx, yy)
                    # Keep only wavelength regions outside the strong
                    # telluric absorption bands (presumably; verify ranges).
                    rcut = np.compress(((aa1 < 13000) | (aa1 > 15150)) & ((11700 < aa1) | (aa1 < 11000)) &
                                       (aa1 > 10000) & ((aa1 < 17800) | (aa1 > 19600)) & (aa1 < 24000),
                                       datastdpf[0][0] / yystd)
                    aa11 = np.compress(((aa1 < 13000) | (aa1 > 15150)) & ((11700 < aa1) | (aa1 < 11000)) &
                                       (aa1 > 10000) & ((aa1 < 17800) | (aa1 > 19600)) & (aa1 < 24000), aa1)
                    yy1clean = np.interp(aa1, aa11, rcut)
                    aa1 = np.array(aa1)
                    yy1clean = np.array(yy1clean)
                    # Linear least-squares fit of the sensitivity vs lambda.
                    A = np.ones((len(rcut), 2), dtype=float)
                    A[:, 0] = aa11
                    result = np.linalg.lstsq(A, rcut)  # result=[zero,slope]
                    p = [result[0][1], result[0][0]]
                    yfit = ntt.util.pval(aa1, p)
                    pl.clf()
                    pl.ion()
                    pl.plot(aa1, datastdpf[0][0] / yystd,
                            color='red', label='std')
                    pl.plot(aa1, yfit, color='blue', label='fit')
                    pl.legend(numpoints=1, markerscale=1.5)
                    # sens function sofi spectra
                    outputsens = 'sens_' + stdpf
                    ntt.util.delete(outputsens)
                    datastdpf[0][0] = yfit
                    pyfits.writeto(outputsens, np.float32(datastdpf), hdrstdpf)
                    #################
                    # Apply the sensitivity to all four bands of the
                    # telluric-corrected object spectrum.
                    imgsc = re.sub('_ex.fits', '_sc.fits', imgex)
                    ntt.util.delete(imgsc)
                    crval2 = pyfits.open(imgf)[0].header.get('CRVAL1')
                    cd2 = pyfits.open(imgf)[0].header.get('CD1_1')
                    dataf, hdrf = pyfits.getdata(imgf, 0, header=True)
                    xx2 = np.arange(len(dataf[0][0]))
                    aa2 = crval2 + (xx2) * cd2
                    yyscale = np.interp(aa2, aa1, yfit)
                    dataf[0][0] = dataf[0][0] / yyscale
                    dataf[1][0] = dataf[1][0] / yyscale
                    dataf[2][0] = dataf[2][0] / yyscale
                    dataf[3][0] = dataf[3][0] / yyscale
                    pyfits.writeto(imgsc, np.float32(dataf), hdrf)
                    ntt.util.updateheader(imgsc, 0,
                                          {'SENSPHOT': [outputsens, 'sens used to flux cal'],
                                           'FILETYPE': [42208, '1D wave,flux calib, tell cor'],
                                           'TRACE1': [imgf, 'Originating file']})
                    # ntt.util.updateheader(imgsc,0,{'FILETYPE':[42208,'1D wave,flux calib, tell cor']})
                    # ntt.util.updateheader(imgsc,0,{'TRACE1':[imgf,'']})
                    print '\n### flux calibrated spectrum= ', imgf, ' with the standard= ', stdpf
                    if imgsc not in outputfile:
                        outputfile.append(imgsc)
                else:
                    print '\n### photometric calibrated not performed \n'
    # ------------------------------------------------------------------
    # Phase-3 keywords and the raw-list log file.
    # ------------------------------------------------------------------
    print '\n### adding keywords for phase 3 ....... '
    reduceddata = ntt.util.rangedata(outputfile)
    f = open('logfile_spec1d_' + str(reduceddata) +
             '_' + str(datenow) + '.raw.list', 'w')
    for img in outputfile:
        if str(img)[-5:] == '.fits':
            hdr = ntt.util.readhdr(img)
            # added for DR2
            if 'NCOMBINE' in hdr:
                _ncomb = ntt.util.readkey3(hdr, 'NCOMBINE')
            else:
                _ncomb = 1.0
            # Effective readout noise scaled by the number of combined
            # reads (NDIT * NCOMBINE).
            _effron = 12. * \
                (1 / np.sqrt(ntt.util.readkey3(hdr, 'ndit') * _ncomb)) * \
                np.sqrt(np.pi / 2)
            try:
                ntt.util.phase3header(img)  # phase 3 definitions
                ntt.util.updateheader(img, 0, {'quality': ['Final', ''],
                                               'EFFRON': [_effron, 'Effective readout noise per output (e-)']})
                f.write(ntt.util.readkey3(
                    ntt.util.readhdr(img), 'arcfile') + '\n')
            except:
                print 'Warning: ' + img + ' is not a fits file'
    f.close()
    return outputfile, 'logfile_spec1d_' + str(reduceddata) + '_' + str(datenow) + '.raw.list'
def efoscspec1Dredu(files, _interactive, _ext_trace, _dispersionline, liststandard, listatmo0, _automaticex,
                    _verbose=False):
    """Reduce EFOSC 1D spectra: extract, flux-calibrate and telluric-correct.

    Groups the input frames by (grism, filter, slit) setup, separating science
    objects from spectrophotometric standards (a frame is a standard if its
    coordinates fall within 100 arcsec of an entry in standard_efosc_mab.txt).
    For each setup it extracts 1D spectra, derives (or reuses) a sensitivity
    function and telluric correction, flux-calibrates with IRAF specred, and
    writes ESO phase-3 keywords into every product.

    Parameters (names kept as in the original interface):
        files           -- list of FITS file names (may contain trailing newlines)
        _interactive    -- run extraction/checks interactively
        _ext_trace      -- trace reference passed to extractspectrum
        _dispersionline -- dispersion-line option passed to extractspectrum
        liststandard    -- optional list of standards supplied by the reducer
        listatmo0       -- optional list of telluric-correction frames
        _automaticex    -- automatic extraction flag
        _verbose        -- unused here; kept for interface compatibility

    Returns:
        (outputfile, logfilename) -- list of all products written, and the
        name of the raw-file log list written alongside them.

    NOTE(review): Python 2 code (print statements, raw_input, octal date
    literals); relies heavily on global IRAF task state and the ntt package.
    """
    # print "LOGX:: Entering `efoscspec1Dredu` method/function in
    # %(__file__)s" % globals()
    import ntt
    # pyfits fallback: modern installs only have astropy.io.fits
    try:
        import pyfits
    except:
        from astropy.io import fits as pyfits
    import re
    import string
    import sys
    import os
    import numpy as np
    os.environ["PYRAF_BETA_STATUS"] = "1"
    # La Silla extinction curve and observatory used for flux calibration
    _extinctdir = 'direc$standard/extinction/'
    _extinction = 'lasilla2.txt'
    _observatory = 'lasilla'
    import datetime
    now = datetime.datetime.now()
    datenow = now.strftime('20%y%m%d%H%M')
    # MJD of 2012-01-01 is 55927; add elapsed days to get today's MJD
    MJDtoday = 55927 + (datetime.date.today() - datetime.date(2012, 01, 01)).days
    dv = ntt.dvex()
    scal = np.pi / 180.  # degrees -> radians
    _gain = ntt.util.readkey3(ntt.util.readhdr(
        re.sub('\n', '', files[0])), 'gain')
    _rdnoise = ntt.util.readkey3(
        ntt.util.readhdr(re.sub('\n', '', files[0])), 'ron')
    # catalogue of spectrophotometric standards (name, ra, dec, mag)
    std, rastd, decstd, magstd = ntt.util.readstandard(
        'standard_efosc_mab.txt')
    objectlist = {}
    for img in files:
        hdr = ntt.util.readhdr(img)
        img = re.sub('\n', '', img)
        ntt.util.correctcard(img)
        _ra = ntt.util.readkey3(hdr, 'RA')
        _dec = ntt.util.readkey3(hdr, 'DEC')
        _object = ntt.util.readkey3(hdr, 'object')
        _grism = ntt.util.readkey3(hdr, 'grism')
        _filter = ntt.util.readkey3(hdr, 'filter')
        _slit = ntt.util.readkey3(hdr, 'slit')
        # great-circle distance (arcsec) between this frame and every standard
        dd = np.arccos(np.sin(_dec * scal) * np.sin(decstd * scal) +
                       np.cos(_dec * scal) * np.cos(decstd * scal) *
                       np.cos((_ra - rastd) * scal)) * ((180 / np.pi) * 3600)
        # closer than 100 arcsec to a catalogued standard -> treat as standard
        if min(dd) < 100:
            _type = 'stdsens'
        else:
            _type = 'obj'
        if min(dd) < 100:
            ntt.util.updateheader(
                img, 0, {'stdname': [std[np.argmin(dd)], '']})
            ntt.util.updateheader(
                img, 0, {'magstd': [float(magstd[np.argmin(dd)]), '']})
        # bucket frames by type and by (grism, filter, slit) setup
        if _type not in objectlist:
            objectlist[_type] = {}
        if (_grism, _filter, _slit) not in objectlist[_type]:
            objectlist[_type][_grism, _filter, _slit] = [img]
        else:
            objectlist[_type][_grism, _filter, _slit].append(img)
    from pyraf import iraf
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.specred(_doprint=0)
    iraf.imutil(_doprint=0)
    # reset task parameters that previous runs may have changed
    toforget = ['imutil.imcopy', 'specred.sarith', 'specred.standard']
    for t in toforget:
        iraf.unlearn(t)
    iraf.specred.verbose = 'no'
    iraf.specred.dispaxi = 2
    iraf.set(direc=ntt.__path__[0] + '/')
    sens = {}
    print objectlist
    outputfile = []
    # prefer the science-object setups; fall back to standards-only input
    if 'obj' in objectlist.keys():
        tpe = 'obj'
    elif 'stdsens' in objectlist.keys():
        tpe = 'stdsens'
    else:
        sys.exit('error: no objects and no standards in the list')
    for setup in objectlist[tpe]:
        extracted = []
        listatmo = []
        if setup not in sens:
            sens[setup] = []
        if tpe == 'obj':
            print '\n### setup= ', setup, '\n### objects= ', objectlist['obj'][setup], '\n'
            for img in objectlist['obj'][setup]:
                # hdr=readhdr(img)
                print '\n\n### next object= ', img, ' ', ntt.util.readkey3(ntt.util.readhdr(img), 'object'), '\n'
                # re-extract if a previous 'Rapid' (quick-look) extraction exists
                if os.path.isfile(re.sub('.fits', '_ex.fits', img)):
                    if ntt.util.readkey3(ntt.util.readhdr(re.sub('.fits', '_ex.fits', img)), 'quality') == 'Rapid':
                        ntt.util.delete(re.sub('.fits', '_ex.fits', img))
                imgex = ntt.util.extractspectrum(img, dv, _ext_trace, _dispersionline, _interactive, 'obj',
                                                 automaticex=_automaticex)
                if not os.path.isfile(imgex):
                    sys.exit('### error, extraction not computed')
                # check the wavelength solution only if no shift was computed yet
                if not ntt.util.readkey3(ntt.util.readhdr(imgex), 'shift') and \
                        ntt.util.readkey3(ntt.util.readhdr(imgex), 'shift') != 0.0:
                    # if not readkey3(readhdr(imgex),'shift'):
                    ntt.efoscspec1Ddef.checkwavestd(imgex, _interactive)
                extracted.append(imgex)
                if imgex not in outputfile:
                    outputfile.append(imgex)
                ntt.util.updateheader(
                    imgex, 0, {'FILETYPE': [22107, 'extracted 1D spectrum ']})
                ntt.util.updateheader(imgex, 0, {
                    'PRODCATG': ['SCIENCE.' + ntt.util.readkey3(ntt.util.readhdr(imgex), 'tech').upper(),
                                 'Data product category']})
                ntt.util.updateheader(
                    imgex, 0, {'TRACE1': [img, 'Originating file']})
                # keep the IRAF aperture database file with the products
                if os.path.isfile('database/ap' + re.sub('_ex.fits', '', imgex)):
                    if 'database/ap' + re.sub('_ex.fits', '', imgex) not in outputfile:
                        outputfile.append(
                            'database/ap' + re.sub('_ex.fits', '', imgex))
            print '\n### all object with this setup extracted\n'
        # choose where the standard comes from: reducer list, this night, or archive
        if liststandard:
            standardlist = liststandard
            _type = 'stdfromdreducer'
        else:
            try:
                standardlist = objectlist['stdsens'][setup]
                _type = 'stdsens'
            except:
                standardlist = ''
                _type = ''
        if _type == 'stdfromdreducer' and len(extracted) >= 1:
            _outputsens2 = ntt.util.searchsens(extracted[0], standardlist)[0]
            print '\n### using standard from reducer ' + str(_outputsens2)
        elif _type not in ['stdsens', 'stdfromdreducer'] and len(extracted) >= 1:
            # no standard observed tonight: copy an archival sensitivity function
            _outputsens2 = ntt.util.searchsens(extracted[0], '')[0]
            os.system('cp ' + _outputsens2 + ' .')
            _outputsens2 = string.split(_outputsens2, '/')[-1]
            print '\n### no standard in the list, using standard from archive'
        else:
            # standards available for this setup: extract them and build
            # telluric-correction files
            for simg in standardlist:
                print '\n### standard for setup ' + \
                      str(setup) + ' = ', simg, ' ', ntt.util.readkey3(
                          ntt.util.readhdr(simg), 'object'), '\n'
                simgex = ntt.util.extractspectrum(
                    simg, dv, False, False, _interactive, 'std', automaticex=_automaticex)
                ntt.util.updateheader(
                    simgex, 0, {'FILETYPE': [22107, 'extracted 1D spectrum']})
                ntt.util.updateheader(simgex, 0, {
                    'PRODCATG': ['SCIENCE.' + ntt.util.readkey3(ntt.util.readhdr(simgex), 'tech').upper(),
                                 'Data product category']})
                ntt.util.updateheader(
                    simgex, 0, {'TRACE1': [simg, 'Originating file']})
                if not ntt.util.readkey3(ntt.util.readhdr(simgex), 'shift') and \
                        ntt.util.readkey3(ntt.util.readhdr(simgex), 'shift') != 0.0:
                    # if not readkey3(readhdr(simgex),'shift'):
                    ntt.efoscspec1Ddef.checkwavestd(simgex, _interactive)
                atmofile = ntt.efoscspec1Ddef.telluric_atmo(
                    simgex)  # atmo file2
                ntt.util.updateheader(
                    atmofile, 0, {'TRACE1': [simgex, 'Originating file']})
                ntt.util.updateheader(
                    atmofile, 0, {'FILETYPE': [21211, 'telluric correction 1D spectrum ']})
                if tpe != 'obj' and atmofile not in outputfile:
                    outputfile.append(atmofile)
                if not listatmo0:
                    listatmo.append(atmofile)
                sens[setup].append(simgex)
                if simgex not in outputfile:
                    outputfile.append(simgex)
                # Gr13 + Free: also extract the GG495 blocking-filter frame to
                # correct second-order contamination
                if setup[0] == 'Gr13' and setup[1] == 'Free':
                    if os.path.isfile(re.sub('Free', 'GG495', simg)):
                        print '\n### extract standard frame with blocking filter to correct for second order contamination\n'
                        simg2 = re.sub('Free', 'GG495', simg)
                        simgex2 = ntt.util.extractspectrum(simg2, dv, False, False, _interactive, 'std',
                                                           automaticex=_automaticex)
                        ntt.util.updateheader(
                            simgex2, 0, {'FILETYPE': [22107, 'extracted 1D spectrum']})
                        ntt.util.updateheader(simgex2, 0, {
                            'PRODCATG': ['SCIENCE.' + ntt.util.readkey3(ntt.util.readhdr(simgex2), 'tech').upper(),
                                         'Data product category']})
                        if not ntt.util.readkey3(ntt.util.readhdr(simgex2), 'shift') and \
                                ntt.util.readkey3(ntt.util.readhdr(simgex2), 'shift') != 0.0:
                            # if not readkey3(readhdr(simgex2),'shift'):
                            ntt.efoscspec1Ddef.checkwavestd(
                                simgex2, _interactive)
                        ntt.util.updateheader(
                            simgex2, 0, {'TRACE1': [simg2, 'Originating file']})
            print '\n### standard available: ', sens[setup]
            if tpe == 'obj':
                # pick one standard (ask the user if there is more than one)
                if len(sens[setup]) > 1:
                    goon = 'no'
                    while goon != 'yes':
                        stdused = raw_input(
                            '\n### more than one standard for this setup, which one do you want to use [' +
                            sens[setup][0] + '] ?')
                        if not stdused:
                            stdused = sens[setup][0]
                        if os.path.isfile(stdused):
                            goon = 'yes'
                else:
                    stdused = sens[setup][0]
                stdvec = [stdused]
            else:
                stdvec = sens[setup]
            for stdused in stdvec:
                # divide the standard by its telluric model, then fit the
                # sensitivity function on the cleaned spectrum
                stdusedclean = re.sub('_ex', '_clean', stdused)
                ntt.util.delete(stdusedclean)
                iraf.specred.sarith(
                    input1=stdused, op='/', input2=atmofile, output=stdusedclean, format='multispec')
                _outputsens2 = ntt.efoscspec1Ddef.sensfunction(
                    stdusedclean, 'spline3', 16, _interactive)
                ntt.util.updateheader(_outputsens2, 0, {'FILETYPE': [
                    21212, 'sensitivity function']})
                ntt.util.updateheader(
                    _outputsens2, 0, {'TRACE1': [stdused, 'Originating file']})
                if setup[0] == 'Gr13' and setup[1] == 'Free':
                    if os.path.isfile(re.sub('Free', 'GG495', stdused)):
                        print '\n### compute sensitivity function of grim 13 with blocking filter ' \
                              'to correct for second order contamination \n'
                        stdused2 = re.sub('Free', 'GG495', stdused)
                        if not ntt.util.readkey3(ntt.util.readhdr(stdused2), 'STDNAME'):
                            ntt.util.updateheader(stdused2, 0, {
                                'STDNAME': [ntt.util.readkey3(ntt.util.readhdr(stdused), 'STDNAME'), '']})
                        atmofile2 = ntt.efoscspec1Ddef.telluric_atmo(
                            stdused2)  # atmo file2
                        stdusedclean2 = re.sub('_ex', '_clean', stdused2)
                        ntt.util.delete(stdusedclean2)
                        iraf.specred.sarith(input1=stdused2, op='/', input2=atmofile2, output=stdusedclean2,
                                            format='multispec')
                        _outputsens3 = ntt.efoscspec1Ddef.sensfunction(
                            stdusedclean2, 'spline3', 16, _interactive)
                        ntt.util.updateheader(_outputsens3, 0, {'FILETYPE': [
                            21212, 'sensitivity function']})
                        ntt.util.updateheader(
                            _outputsens3, 0, {'TRACE1': [stdused2, 'Originating file']})
                        # merge the two sensitivity functions
                        _outputsens2 = correctsens(_outputsens2, _outputsens3)
        if _outputsens2 not in outputfile:
            outputfile.append(_outputsens2)
        if _outputsens2 and tpe == 'obj':
            ####################################################
            for img in objectlist['obj'][setup]:  # flux calibrate 2d images
                imgd = fluxcalib2d(img, _outputsens2)
                ntt.util.updateheader(
                    imgd, 0, {'FILETYPE': [22209, '2D wavelength and flux calibrated spectrum ']})
                ntt.util.updateheader(
                    imgd, 0, {'TRACE1': [img, 'Originating files']})
                iraf.hedit(imgd, 'PRODCATG', delete='yes',
                           update='yes', verify='no')
                if imgd not in outputfile:
                    outputfile.append(imgd)
            ####################################################
            # flux calib in the standard way
            if not listatmo and listatmo0:
                listatmo = listatmo0[:]
            for _imgex in extracted:
                _airmass = ntt.util.readkey3(
                    ntt.util.readhdr(_imgex), 'airmass')
                _exptime = ntt.util.readkey3(
                    ntt.util.readhdr(_imgex), 'exptime')
                _imgf = re.sub('_ex.fits', '_f.fits', _imgex)
                ntt.util.delete(_imgf)
                qqq = iraf.specred.calibrate(input=_imgex, output=_imgf, sensiti=_outputsens2, extinct='yes',
                                             flux='yes', extinction=_extinctdir + _extinction,
                                             observatory=_observatory, airmass=_airmass, ignorea='yes',
                                             exptime=_exptime, fnu='no')
                hedvec = {'SENSFUN': [_outputsens2, ''],
                          'FILETYPE': [22208, '1D wavelength and flux calibrated spectrum', ''],
                          # 'SNR':[ntt.util.StoN(_imgf,50),'Average signal to noise ratio per pixel'],
                          'SNR': [ntt.util.StoN2(_imgf, False), 'Average signal to noise ratio per pixel'],
                          'BUNIT': ['erg/cm2/s/Angstrom', 'Physical unit of array values'],
                          'TRACE1': [_imgex, 'Originating file'],
                          'ASSON1': [re.sub('_f.fits', '_2df.fits', _imgf), 'Name of associated file'],
                          'ASSOC1': ['ANCILLARY.2DSPECTRUM', 'Category of associated file']}
                ntt.util.updateheader(_imgf, 0, hedvec)
                if _imgf not in outputfile:
                    outputfile.append(_imgf)
                if listatmo:
                    # apply the telluric correction; the extra imcopy calls
                    # restore bands 2-4 (sky/error layers) that sarith divided
                    atmofile = ntt.util.searcharc(_imgex, listatmo)[0]
                    if atmofile:
                        _imge = re.sub('_f.fits', '_e.fits', _imgf)
                        ntt.util.delete(_imge)
                        iraf.specred.sarith(input1=_imgf, op='/', input2=atmofile, output=_imge, w1='INDEF',
                                            w2='INDEF', format='multispec')
                        try:
                            iraf.imutil.imcopy(
                                input=_imgf + '[*,1,2]', output=_imge + '[*,1,2]', verbose='no')
                        except:
                            pass
                        try:
                            iraf.imutil.imcopy(
                                input=_imgf + '[*,1,3]', output=_imge + '[*,1,3]', verbose='no')
                        except:
                            pass
                        try:
                            iraf.imutil.imcopy(
                                input=_imgf + '[*,1,4]', output=_imge + '[*,1,4]', verbose='no')
                        except:
                            pass
                        if _imge not in outputfile:
                            outputfile.append(_imge)
                        ntt.util.updateheader(
                            _imge, 0, {'FILETYPE': [22210, '1D, wave, flux calib, telluric corr.']})
                        if atmofile not in outputfile:
                            outputfile.append(atmofile)
                        ntt.util.updateheader(
                            _imge, 0, {'ATMOFILE': [atmofile, '']})
                        ntt.util.updateheader(
                            _imge, 0, {'TRACE1': [_imgf, 'Originating file']})
                        imgin = _imge
                    else:
                        imgin = _imgf
                else:
                    imgin = _imgf
                # also export the best product as an ASCII table
                imgasci = re.sub('.fits', '.asci', imgin)
                ntt.util.delete(imgasci)
                iraf.onedspec(_doprint=0)
                iraf.onedspec.wspectext(
                    imgin + '[*,1,1]', imgasci, header='no')
                if imgasci not in outputfile:
                    outputfile.append(imgasci)
    print '\n### adding keywords for phase 3 ....... '
    for img in outputfile:
        if str(img)[-5:] == '.fits':
            try:
                ntt.util.phase3header(img)  # phase 3 definitions
                ntt.util.updateheader(img, 0, {'quality': ['Final', '']})
            except:
                print 'Warning: ' + img + ' is not a fits file'
            # older pyfits (< 3.1) needs the explicit HIERARCH prefix to
            # address ESO hierarchical keywords
            try:
                if int(re.sub('\.', '', str(pyfits.__version__))[:2]) <= 30:
                    aa = 'HIERARCH '
                else:
                    aa = ''
            except:
                aa = ''
            # drop raw-data DPR keywords from the final products
            imm = pyfits.open(img, mode='update')
            hdr = imm[0].header
            if aa + 'ESO DPR CATG' in hdr:
                hdr.pop(aa + 'ESO DPR CATG')
            if aa + 'ESO DPR TECH' in hdr:
                hdr.pop(aa + 'ESO DPR TECH')
            if aa + 'ESO DPR TYPE' in hdr:
                hdr.pop(aa + 'ESO DPR TYPE')
            imm.flush()
            imm.close()
    print outputfile
    # write the list of raw frames that contributed to the products
    reduceddata = ntt.rangedata(outputfile)
    f = open('logfile_spec1d_' + str(reduceddata) +
             '_' + str(datenow) + '.raw.list', 'w')
    for img in outputfile:
        try:
            f.write(ntt.util.readkey3(
                ntt.util.readhdr(img), 'arcfile') + '\n')
        except:
            pass
    f.close()
    return outputfile, 'logfile_spec1d_' + str(reduceddata) + '_' + str(datenow) + '.raw.list'
def makeflat(lista):
    """Build a bias-corrected, normalized flat field from lamp-on/off frames.

    Combines pairs of flat frames (lamp off, off-mask, on-mask, on --
    presumably `lista` holds 8 frames ordered off/off/mask.../on; TODO
    confirm against the caller), removes the bias pattern with the Lidman
    column-average technique, subtracts off from on, and normalizes the
    result with IRAF's generic.normalize.

    Parameters:
        lista -- list of FITS flat-field frame names (indexed 0..7 here)

    Returns:
        output -- name of the flat FITS file (also returned when the user
                  chooses to skip re-creation of an existing flat).

    NOTE(review): Python 2 code; interactive via raw_input; relies on IRAF
    global task state.
    """
    # print "LOGX:: Entering `makeflat` method/function in %(__file__)s" %
    # globals()
    flat = ''
    import datetime
    import glob
    import os
    import ntt
    from ntt.util import readhdr, readkey3, delete, name_duplicate, updateheader, correctcard
    from pyraf import iraf
    iraf.images(_doprint=0, Stdout=0)
    iraf.imutil(_doprint=0, Stdout=0)
    iraf.imgeom(_doprint=0, Stdout=0)
    # iraf.blkavg(_doprint=0, Stdout=0)
    iraf.noao(_doprint=0, Stdout=0)
    iraf.imred(_doprint=0, Stdout=0)
    iraf.generic(_doprint=0, Stdout=0)
    # reset task parameters that previous runs may have changed
    toforget = ['imgeom.blkavg', 'imutil.imarith',
                'immatch.imcombine', 'noao.imred']
    for t in toforget:
        iraf.unlearn(t)
    import datetime
    # MJD of 2012-01-01 is 55927; add elapsed days to get today's MJD
    MJDtoday = 55927 + (datetime.date.today() -
                        datetime.date(2012, 01, 01)).days
    _date = readkey3(readhdr(lista[0]), 'date-night')
    _filter = readkey3(readhdr(lista[0]), 'filter')
    output = name_duplicate(
        lista[3], 'flat_' + str(_date) + '_' + str(_filter) + '_' + str(MJDtoday), '')
    # ask before redoing an existing flat; default answer is 'n'
    if os.path.isfile(output):
        answ = raw_input('file already prooduced, do again [y/[n]] ? ')
        if not answ:
            answ = 'n'
    else:
        answ = 'y'
    if answ in ['yes', 'y', 'YES', 'Y', 'Yes']:
        delete(
            "temp_off.fits,temp_off_mask.fits,temp_on_mask.fits,temp_on.fits")
        # combine the frame pairs (off, off-mask, on-mask, on)
        iraf.image.immatch.imcombine(
            lista[0] + ',' + lista[7], output="temp_off.fits")
        iraf.image.immatch.imcombine(
            lista[1] + ',' + lista[6], output="temp_off_mask.fits")
        iraf.image.immatch.imcombine(
            lista[2] + ',' + lista[5], output="temp_on_mask.fits")
        iraf.image.immatch.imcombine(
            lista[3] + ',' + lista[4], output="temp_on.fits")
        # create the bias correction for the flat-on according to the
        # Lidman technique0
        delete(
            "temp_onA.fits,temp_onC.fits,temp_onB.fits,temp_onAC.fits,temp_onACB.fits,temp_onACB_2D.fits")
        delete("temp_on_bias.fits")
        # column averages over two strips, combined and re-expanded to 2D
        iraf.imgeom.blkavg(input="temp_on.fits[500:600,*]",
                           output="temp_onA.fits", option="average", b1=101, b2=1)
        iraf.imgeom.blkavg(input="temp_on_mask.fits[500:600,*]",
                           output="temp_onC.fits", option="average", b1=101, b2=1)
        iraf.imgeom.blkavg(input="temp_on_mask.fits[50:150,*]",
                           output="temp_onB.fits", option="average", b1=101, b2=1)
        iraf.imutil.imarith("temp_onA.fits", "-",
                            "temp_onC.fits", "temp_onAC.fits")
        iraf.imutil.imarith("temp_onAC.fits", "+",
                            "temp_onB.fits", "temp_onACB.fits")
        iraf.imgeom.blkrep(input="temp_onACB.fits",
                           output="temp_onACB_2D.fits", b1=1024, b2=1)
        iraf.imutil.imarith("temp_on.fits", "-",
                            "temp_onACB_2D.fits", "temp_on_bias.fits")
        # same as above for the flat-off
        delete(
            "temp_offA.fits,temp_offC.fits,temp_offB.fits,temp_offAC.fits,temp_offACB.fits,temp_offACB_2D.fits")
        delete("temp_off_bias.fits")
        iraf.imgeom.blkavg(input="temp_off.fits[500:600,*]",
                           output="temp_offA.fits", option="average", b1=101, b2=1)
        iraf.imgeom.blkavg(input="temp_off_mask.fits[500:600,*]",
                           output="temp_offC.fits", option="average", b1=101, b2=1)
        iraf.imgeom.blkavg(input="temp_off_mask.fits[50:150,*]",
                           output="temp_offB.fits", option="average", b1=101, b2=1)
        iraf.imutil.imarith("temp_offA.fits", "-",
                            "temp_offC.fits", "temp_offAC.fits")
        iraf.imutil.imarith("temp_offAC.fits",
                            "+", "temp_offB.fits", "temp_offACB.fits")
        iraf.imgeom.blkrep(input="temp_offACB.fits",
                           output="temp_offACB_2D.fits", b1=1024, b2=1)
        iraf.imutil.imarith("temp_off.fits", "-",
                            "temp_offACB_2D.fits", "temp_off_bias.fits")
        # create the corrected flat-field
        # output=name_duplicate("temp_on_bias.fits",'flat_'+str(_date)+'_'+str(_filter)+'_'+str(MJDtoday),'')
        output = name_duplicate(
            lista[3], 'flat_' + str(_date) + '_' + str(_filter) + '_' + str(MJDtoday), '')
        # print lista[0],'flat_'+str(_date)+'_'+str(_filter)+'_'+str(MJDtoday)
        delete(output)
        iraf.imutil.imarith("temp_on_bias.fits", "-",
                            "temp_off_bias.fits", output)
        iraf.noao.imred.generic.normalize(output)  # normalize the flat-field
        correctcard(output)
        delete("temp_on*.fits")  # delete the temporary images
        delete("temp_off*.fits")
        print 'flat -> ' + str(output)
    else:
        print 'skip redoing the flat'
    return output
def create_masterflat(flatdir=None, biasdir=None, channel='rc', plot=True):
    '''
    Creates a masterflat from both dome flats and sky flats if the number
    of counts in the given filter is not saturated and not too low
    (between 3000 and 40000).

    Parameters:
        flatdir -- directory with the raw flat frames (default: cwd);
                   the process chdir's into it
        biasdir -- directory with the master bias (default: flatdir)
        channel -- camera channel prefix for file names (default 'rc')
        plot    -- save diagnostic PNGs under reduced/flats/

    Side effects: writes Flat_<channel>_<band>.fits and *_norm.fits per
    band (u,g,r,i), copies the normalized flats into ../../refphot/, and
    deletes intermediate b_* files. Returns None.

    NOTE(review): the docstring says 3000-40000 but the code accepts
    frames whose 90th percentile is between 4000 and 45000 -- the code is
    authoritative here.
    '''
    if (flatdir == None or flatdir == ""):
        flatdir = "."
    if (biasdir == None or biasdir == ""):
        biasdir = flatdir
    os.chdir(flatdir)
    if (plot and not os.path.isdir("reduced/flats")):
        os.makedirs("reduced/flats")
    # all four per-band normalized flats already present -> nothing to do
    if (len(glob.glob("Flat_%s*norm.fits" % channel)) == 4):
        logger.info("Master Flat exists!")
        return
    if (len(glob.glob("Flat_%s*norm.fits" % channel)) > 0):
        logger.info("Some Master Flat exist!")
    else:
        logger.info("Starting the Master Flat creation!")
    bias_slow = "Bias_%s_slow.fits" % channel
    bias_fast = "Bias_%s_fast.fits" % channel
    if (not os.path.isfile(bias_slow) and not os.path.isfile(bias_fast)):
        create_masterbias(biasdir)
    lsflat = []
    lfflat = []
    obj = ""
    imtype = ""
    #Select all filts that are Flats with same instrument
    for f in glob.glob(channel + "*fits"):
        try:
            if fitsutils.has_par(f, "OBJECT"):
                obj = str.upper(fitsutils.get_par(f, "OBJECT"))
            else:
                continue
            if fitsutils.has_par(f, "IMGTYPE"):
                imtype = str.upper(fitsutils.get_par(f, "IMGTYPE"))
            else:
                continue
            #if ("RAINBOW CAM" in str.upper(fitsutils.get_par(f, "CAM_NAME")) and ("DOME" in obj or "FLAT" in obj or "Twilight" in obj or "TWILIGHT" in imtype or "DOME" in imtype)):
            # ADCSPEED==2 frames use the fast readout (and the fast bias)
            if ("twilight" in imtype.lower()):
                if (fitsutils.get_par(f, "ADCSPEED") == 2):
                    lfflat.append(f)
                else:
                    lsflat.append(f)
        except:
            logger.error("Error with retrieving parameters for file %s" % f)
            pass
    logger.info("Files for slow flat %s" % lsflat)
    logger.info("Files for fast flat %s" % lfflat)
    # IRAF @list files driving imarith over the whole set at once
    fsfile = "lflat_slow_" + channel
    np.savetxt(fsfile, np.array(lsflat), fmt="%s")
    fffile = "lflat_fast_" + channel
    np.savetxt(fffile, np.array(lfflat), fmt="%s")
    # Running IRAF
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    #Remove bias from the flat
    if len(lsflat) > 0:
        iraf.imarith("@" + fsfile, "-", bias_slow, "b_@" + fsfile)
    if len(lfflat) > 0:
        iraf.imarith("@" + fffile, "-", bias_fast, "b_@" + fffile)
    #Remove the list files
    os.remove(fsfile)
    os.remove(fffile)
    #Slices the flats.
    debiased_flats = glob.glob("b_*.fits")
    for f in debiased_flats:
        logger.info("Slicing file %s" % f)
        try:
            slice_rc(f)
        except:
            logger.error(
                "Error when slicing file... deleting the unsliced one...")
        #Remove the un-sliced file
        os.remove(f)
    #Selects the ones that are suitable given the number of counts and combines them.
    bands = ['u', 'g', 'r', 'i']
    for b in bands:
        out = "Flat_%s_%s.fits" % (channel, b)
        out_norm = out.replace(".fits", "_norm.fits")
        if (os.path.isfile(out_norm)):
            logger.error("Master Flat for filter %s exists. Skipping..." % b)
            continue
        lfiles = []
        for f in glob.glob('b_*_%s.fits' % b):
            fi = fits.open(f)
            d = fi[0].data
            status = "rejected"
            # accept frames whose 90th percentile lies in (4000, 45000);
            # clip saturated pixels to the frame median before combining
            if np.percentile(d, 90) > 4000 and np.percentile(d, 90) < 45000:
                lfiles.append(f)
                mymode = 1. * np.median(d.flatten())
                d[d > 45000] = mymode
                fi[0].data = d
                fi.writeto(f, clobber=True)
                status = "accepted"
            if (plot):
                plt.title("Flat filter %s. %s" % (b, status))
                plt.imshow(d.T, cmap=plt.get_cmap("nipy_spectral"))
                plt.colorbar()
                plt.savefig("reduced/flats/%s" % (f.replace(".fits", ".png")))
                plt.close()
        #Make sure that the optimum number of counts is not too low and not saturated.
        if len(lfiles) == 0:
            logger.error(
                "WARNING!!! Could not find suitable flats for band %s" % b)
            continue
        if len(lfiles) < 3:
            logger.error(
                "WARNING!!! Could find less than 3 flats for band %s. Skipping, as it is not reliable..." % b)
            continue
        ffile = "lflat_" + b
        np.savetxt(ffile, np.array(lfiles), fmt="%s")
        #Cleaning of old files
        if (os.path.isfile(out)):
            os.remove(out)
        if (os.path.isfile(out_norm)):
            os.remove(out_norm)
        if (os.path.isfile("Flat_stats")):
            os.remove("Flat_stats")
        #Combine flats
        iraf.imcombine(input = "@"+ffile, \
                        output = out, \
                        combine = "median",\
                        scale = "mode",
                        reject = "sigclip", lsigma = 2., hsigma = 2, gain=1.7, rdnoise=4.)
        iraf.imstat(out, fields="image,npix,mean,stddev,min,max,mode",
                    Stdout="Flat_stats")
        st = np.genfromtxt("Flat_stats", names=True, dtype=None)
        #Normalize flats
        iraf.imarith(out, "/", st["MODE"], out_norm)
        #Do some cleaning
        logger.info('Removing from lfiles')
        for f in glob.glob('b_*_%s.fits' % b):
            os.remove(f)
        os.remove(ffile)
        if os.path.isfile(fsfile):
            os.remove(fsfile)
        if os.path.isfile(fffile):
            os.remove(fffile)
        #copy into the reference folder with current date
        newdir = os.path.join("../../refphot/",
                              os.path.basename(os.path.abspath(flatdir)))
        if (not os.path.isdir(newdir)):
            os.makedirs(newdir)
        shutil.copy(out_norm, os.path.join(newdir, os.path.basename(out_norm)))
    copy_ref_calib(flatdir, "Flat")
def makeillumination(lista, flatfield):  #,outputfile,illum_frame):
    """Build an illumination-correction frame from dithered standard frames.

    Flat-fields each input frame (after crosstalk correction), sky-subtracts,
    then photometers the same star across the dither pattern (SExtractor
    detection + DAOPHOT phot, with interactive confirmation). The measured
    fluxes are fit with a 2D polynomial surface (IRAF surfit) which, after
    normalization, becomes the illumination correction.

    Parameters:
        lista     -- list of standard-star FITS frames (dithered sequence)
        flatfield -- flat-field image used by ccdproc

    Returns:
        illum_frame -- name of the normalized illumination-correction FITS.

    NOTE(review): Python 2, fully interactive (imexamine/raw_input), runs
    SExtractor via os.system; relies on 'xcum'/'ycum' header keys to chain
    the dither offsets -- presumably cumulative pixel shifts; confirm in
    the sky-subtraction code.
    """
    import os, glob, string, re
    from astropy.io import fits as pyfits
    import ntt
    from ntt.util import readhdr, readkey3, delete, display_image, defsex, name_duplicate, correctcard
    from numpy import compress, array, argmax, argmin, min, argsort, float32
    import datetime
    # MJD of 2012-01-01 is 55927; add elapsed days to get today's MJD
    MJDtoday = 55927 + (datetime.date.today() -
                        datetime.date(2012, 01, 01)).days
    _date = readkey3(readhdr(lista[0]), 'date-night')
    _filter = readkey3(readhdr(lista[0]), 'filter')
    illum_frame = name_duplicate(
        lista[0], 'illum_' + _date + '_' + _filter + '_' + str(MJDtoday), '')
    from pyraf import iraf
    iraf.images(_doprint=0, Stdout=0)
    iraf.imutil(_doprint=0, Stdout=0)
    iraf.utilities(_doprint=0, Stdout=0)
    iraf.noao(_doprint=0, Stdout=0)
    iraf.imred(_doprint=0, Stdout=0)
    iraf.ccdred(_doprint=0, Stdout=0)
    iraf.digiphot(_doprint=0, Stdout=0)
    iraf.daophot(_doprint=0, Stdout=0)
    iraf.generic(_doprint=0, Stdout=0)
    # reset task parameters that previous runs may have changed
    toforget = ['digiphot.daophot', 'imutil.imarith',
                'image', 'utilities.surfit']
    for t in toforget:
        iraf.unlearn(t)
    n = len(lista)
    # start loop to read image names from the input file
    lista1 = []
    iraf.ccdred.verbose = 'no'
    ff = open('templist.lst', 'w')
    for i in range(0, len(lista)):
        # 'C'-prefixed copies: crosstalk-corrected then flat-fielded frames
        ff.write('C' + lista[i] + '\n')
        delete('C' + lista[i])
        delete('C' + re.sub('.fits', '_sub.fits', lista[i]))
        ntt.sofiphotredudef.crosstalk(lista[i], 'C' + lista[i])
        iraf.noao.imred.ccdred.ccdproc('C' + lista[i], output='', overscan="no", trim="yes", ccdtype='',
                                       darkcor='no', fixpix='no', zerocor="no", flatcor='yes', illumco='no',
                                       trimsec='[1:1024,1:1007]', biassec='', flat=flatfield, illum='')
        correctcard('C' + lista[i])
        lista1.append('C' + lista[i])
    ff.close()
    print '\n### prereducing STD frames to compute illumination correction ........'
    lista2, skyfile = ntt.sofiphotredudef.skysub(
        lista1, readkey3(readhdr(lista1[0]), 'ron'),
        readkey3(readhdr(lista1[0]), 'gain'), True)
    lista2 = ntt.sofiphotredudef.sortbyJD(lista2)
    # user marks the reference star on the first frame with 'x', quits with 'q'
    print '\n### use x on the star and q to continue....'
    display_image(lista2[0], 2, '', '', False)
    delete('tmpone.coo')
    iraf.image.tv.imexamine(lista2[0], 2, logfile='tmpone.coo',
                            keeplog='yes', xformat='', yformat='', wcs='logical')
    iraf.tvmark(2, 'tmpone.coo', mark="circle", number='yes', label='no',
                radii=8, nxoffse=5, nyoffse=5, color=204, txsize=2)
    xycoo = iraf.proto.fields('tmpone.coo', '1,2', Stdout=1)
    x0, y0 = string.split(xycoo[0])
    x0 = float(x0)
    y0 = float(y0)
    # cumulative dither offsets of the reference frame
    xcum0 = readkey3(readhdr(lista2[0]), 'xcum')
    ycum0 = readkey3(readhdr(lista2[0]), 'ycum')
    iraf.digiphot(_doprint=0, Stdout=0)
    iraf.daophot(_doprint=0, Stdout=0)
    iraf.noao.digiphot.daophot.datapars.datamin = -1000
    iraf.noao.digiphot.daophot.datapars.datamax = 60000
    iraf.noao.digiphot.daophot.daopars.function = 'gauss'
    iraf.noao.digiphot.daophot.photpars.zmag = 0
    namesex = defsex('default.sex')
    for i in range(0, len(lista2)):
        j = i + 1
        xcum = readkey3(readhdr(lista2[i]), 'xcum')
        ycum = readkey3(readhdr(lista2[i]), 'ycum')
        # predicted star position on this frame from the dither offsets
        xx = x0 - xcum0 + xcum
        yy = y0 - ycum0 + ycum
        # sex objects
        os.system('sex ' + lista2[i] + ' -c ' + namesex + '> _logsex')
        delete('_logsex')
        xpix = iraf.proto.fields('detections.cat', fields='2', Stdout=1)
        ypix = iraf.proto.fields('detections.cat', fields='3', Stdout=1)
        cm = iraf.proto.fields('detections.cat', fields='4', Stdout=1)
        # drop empty rows, keep at most the 300 entries sorted by column 4
        cm = compress((array(xpix) != ''), array(cm, float))
        ypix = compress((array(xpix) != ''), array(ypix, float))
        xpix = compress((array(xpix) != ''), array(xpix, float))
        if len(xpix) > 300:
            num = 300
        else:
            num = len(xpix) - 1
        xpix = xpix[argsort(cm)][0:num]
        ypix = ypix[argsort(cm)][0:num]
        # pick the detection closest to the predicted position
        distance = (ypix - yy)**2 + (xpix - xx)**2
        xx1, yy1 = xpix[argmin(distance)], ypix[argmin(distance)]
        f = open('tmpone.coo', 'w')
        f.write(str(xx1) + ' ' + str(yy1) + '\n')
        f.close()
        display_image(lista2[i], 1, '', '', False)
        iraf.tvmark(1, 'tmpone.coo', mark="circle", number='yes', label='no',
                    radii=8, nxoffse=5, nyoffse=5, color=204, txsize=2)
        answ = 'n'
        # loop until the user confirms a selection that photometers cleanly
        while answ != 'y':
            answ = raw_input('selected the right one [[y]/n] ?')
            if not answ:
                answ = 'y'
            if answ in ['y', 'YES', 'yes', 'Y']:
                print lista2[i]
                delete('pippo.' + str(j) + '.mag')
                gggg = iraf.digiphot.daophot.phot(lista2[i], "tmpone.coo", output="pippo." + str(j) + ".mag",
                                                  verify='no', interac='no', Stdout=1)
                # a parsable flux value means the photometry succeeded
                try:
                    float(string.split(gggg[0])[3])
                    answ = 'y'
                except:
                    print '\n### warning'
                    answ = 'n'
            else:
                # manual re-selection with imexamine
                print '\n### select the std star'
                display_image(lista2[i], 1, '', '', False)
                iraf.image.tv.imexamine(lista2[i], 1, logfile='tmpone.coo', keeplog='yes',
                                        xformat='', yformat='', wcs='logical')
                xycoo = iraf.proto.fields('tmpone.coo', '1,2', Stdout=1)
                x2, y2 = string.split(xycoo[0])
                f = open('tmpone.coo', 'w')
                f.write(str(x2) + ' ' + str(y2) + '\n')
                f.close()
                delete('pippo.' + str(j) + '.mag')
                print '###### new selection ' + str(x2), str(y2)
                gggg = iraf.digiphot.daophot.phot(lista2[i], "tmpone.coo", output='pippo.' + str(j) + '.mag',
                                                  verify='no', interac='no', Stdout=1)
                try:
                    float(string.split(gggg[0])[3])
                    answ = 'y'
                except:
                    print '\n### warning'
                    answ = 'n'
    # gather all photometry into one (x, y, flux) table for the surface fit
    os.system('ls pippo.*.mag > tempmag.lst')
    tmptbl0 = iraf.txdump(textfile="@tempmag.lst",
                          fields="XCENTER,YCENTER,FLUX", expr='yes', Stdout=1)
    ff = open('magnitudini', 'w')
    for i in tmptbl0:
        ff.write(i + '\n')
    ff.close()
    # delete the temporary images and files
    delete("temp*.fits")
    delete('temp*.lst')
    delete(illum_frame)
    print '\n### fitting the illumination surface...'
    aaa = iraf.utilities.surfit('magnitudini', image=illum_frame, function="polynomial",
                                xorder=2, yorder=2, xterms="full", ncols=1024, nlines=1024, Stdout=1)
    iraf.noao.imred.generic.normalize(illum_frame)
    correctcard(lista[0])
    # rewrite the fitted surface with the header of the first input frame
    data, hdr = pyfits.getdata(illum_frame, 0, header=True)
    data0, hdr0 = pyfits.getdata(lista[0], 0, header=True)
    delete(illum_frame)
    pyfits.writeto(illum_frame, float32(data), hdr0)
    flatfield0 = string.split(flatfield, '/')[-1]
    ntt.util.updateheader(illum_frame, 0, {'MKILLUM': [flatfield0, 'flat field']})
    display_image(illum_frame, 1, '', '', False)
    for i in range(0, len(lista)):  # in lista:
        img = lista[i]
        delete('pippo.' + str(i) + '.mag')
        delete('C' + img)
        delete('C' + re.sub('.fits', '_sky.fits', img))
    # delete('C*.fits.mag.1')
    # iraf.hedit(illum_frame,'MKILLUM','Illum. corr. created '+flatfield,add='yes',update='yes',verify='no')
    return illum_frame
def reduce_image(image, flatdir=None, biasdir=None, cosmic=False, astrometry=True, channel='rc',
                 target_dir='reduced', overwrite=False):
    '''
    Applies Flat field and bias calibrations to the image.

    Steps:
    1. - Solve astrometry on the entire image.
    2. - Computes cosmic ray rejectionon the entire image.
    3. - Compute master bias (if it does not exist) and de-bias the image.
    4. - Separate the image into 4 filters.
    5. - Compute flat field for each filter (if it does not exist) and apply flat fielding on the image.
    6. - Compute the image zeropoint.

    Parameters:
        image      -- path to the raw multi-filter FITS image
        flatdir    -- directory holding the master flats (default: image dir)
        biasdir    -- directory holding the master biases (default: ".")
        cosmic     -- run cosmic-ray cleaning before calibration
        astrometry -- attempt an astrometric solution first
        channel    -- camera channel prefix for calibration file names
        target_dir -- subdirectory for the reduced per-band images
        overwrite  -- redo the reduction even if the products exist

    Returns:
        list of reduced per-band file names; [] if they already exist and
        overwrite is False; None on unrecoverable errors (missing header
        keywords or missing bias).

    NOTE(review): chdir's into the image directory and leaves the process
    there; the loop variable `image` is deliberately rebound to each sliced
    product near the end, shadowing the input parameter.
    '''
    logger.info("Reducing image %s" % image)
    print "Reducing image ", image
    image = os.path.abspath(image)
    imname = os.path.basename(image).replace(".fits", "")
    try:
        objectname = fitsutils.get_par(image, "NAME").replace(
            " ", "") + "_" + fitsutils.get_par(image, "FILTER")
    except:
        logger.error("ERROR, image " + image +
                     " does not have a NAME or a FILTER!!!")
        return
    print "For object", objectname
    logger.info("For object %s" % objectname)
    #Change to image directory
    mydir = os.path.dirname(image)
    if mydir == "":
        mydir = "."
    mydir = os.path.abspath(mydir)
    os.chdir(mydir)
    #Create destination directory
    if (not os.path.isdir(target_dir)):
        os.makedirs(target_dir)
    #If we don't want to overwrite the already extracted images, we check wether they exist.
    if (not overwrite):
        existing = True
        for band in ['u', 'g', 'r', 'i']:
            destfile = os.path.join(
                target_dir, imname + "_f_b_a_%s_%s_0.fits" % (objectname, band))
            logger.info("Looking if file %s exists: %s" % (
                destfile, (os.path.isfile(destfile))))
            existing = existing and (os.path.isfile(destfile))
        if existing:
            return []
    #Initialize the basic parameters.
    init_header_reduced(image)
    astro = ""
    if (astrometry):
        logger.info("Solving astometry for the whole image...")
        img = solve_astrometry(image)
        if (os.path.isfile(img)):
            # "a_" marks astrometry-solved products in the output file name
            astro = "a_"
            fitsutils.update_par(img, "IQWCS", 1)
        else:
            logger.error("ASTROMETRY DID NOT SOLVE ON IMAGE %s" % image)
            img = image
    #Update noise parameters needed for cosmic reection
    # ADCSPEED==2 is the fast readout mode (higher read noise)
    if (fitsutils.get_par(img, "ADCSPEED") == 2):
        fitsutils.update_par(img, "RDNOISE", 20.)
    else:
        fitsutils.update_par(img, "RDNOISE", 4.)
    if (cosmic):
        logger.info("Correcting for cosmic rays...")
        # Correct for cosmics each filter
        cleanimg = clean_cosmic(os.path.join(os.path.abspath(mydir), img))
        img = cleanimg
    #Compute BIAS
    if (biasdir is None or biasdir == ""):
        biasdir = "."
    create_masterbias(biasdir)
    bias_slow = os.path.join(biasdir, "Bias_%s_%s.fits" % (channel, 'slow'))
    bias_fast = os.path.join(biasdir, "Bias_%s_%s.fits" % (channel, 'fast'))
    # Running IRAF to DE-BIAS
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    #Compute flat field
    if (flatdir is None or flatdir == ""):
        flatdir = "."
    create_masterflat(flatdir, biasdir)
    #New names for the object.
    debiased = os.path.join(os.path.dirname(img), "b_" + os.path.basename(img))
    logger.info("Creating debiased file, %s" % debiased)
    # if the bias matching this image's readout speed is missing, try the
    # reference folder once before giving up
    if ( (fitsutils.get_par(img, "ADCSPEED")==0.1 and not os.path.isfile(bias_slow)) \
        or (fitsutils.get_par(img, "ADCSPEED")==2 and not os.path.isfile(bias_fast)) ):
        logger.warn("Master bias not found! Tryting to copy from reference folder...")
        copy_ref_calib(mydir, "Bias")
        if ( (fitsutils.get_par(img, "ADCSPEED")==0.1 and not os.path.isfile(bias_slow)) \
            or (fitsutils.get_par(img, "ADCSPEED")==2 and not os.path.isfile(bias_fast)) ):
            logger.error("Bias not found in reference folder")
            return
    #Clean first
    if (os.path.isfile(debiased)):
        os.remove(debiased)
    #Debias
    if (fitsutils.get_par(img, "ADCSPEED") == 2):
        iraf.imarith(img, "-", bias_fast, debiased)
        fitsutils.update_par(debiased, "BIASFILE", bias_fast)
        fitsutils.update_par(debiased, "RDNOISE", 20.)
    else:
        iraf.imarith(img, "-", bias_slow, debiased)
        fitsutils.update_par(debiased, "BIASFILE", bias_slow)
        fitsutils.update_par(debiased, "RDNOISE", 4.)
    #Set negative counts to zero
    hdu = fits.open(debiased)
    header = hdu[0].header
    hdu[0].data[hdu[0].data < 0] = 0
    hdu.writeto(debiased, clobber=True)
    #Slicing the image for flats
    slice_names = slice_rc(debiased)
    print "Creating sliced files, ", slice_names
    #Remove un-sliced image
    os.remove(debiased)
    # DE-flat each filter and store under object name
    for i, debiased_f in enumerate(slice_names):
        b = fitsutils.get_par(debiased_f, 'filter')
        deflatted = os.path.join(os.path.dirname(image), target_dir,
                                 imname + "_f_b_" + astro + objectname + "_%s.fits" % b)
        #Flat to be used for that filter
        flat = os.path.join(flatdir, "Flat_%s_%s_norm.fits" % (channel, b))
        if (not os.path.isfile(flat)):
            logger.warn("Master flat not found in %s" % flat)
            copy_ref_calib(mydir, "Flat")
            continue
        else:
            logger.info("Using flat %s" % flat)
        #Cleans the deflatted file if exists
        if (os.path.isfile(deflatted)):
            os.remove(deflatted)
        if (os.path.isfile(debiased_f) and os.path.isfile(flat)):
            logger.info("Storing de-flatted %s as %s" %
                        (debiased_f, deflatted))
            time.sleep(1)
            iraf.imarith(debiased_f, "/", flat, deflatted)
        else:
            logger.error(
                "SOMETHING IS WRONG. Error when dividing %s by the flat field %s!" % (debiased_f, flat))
        #Removes the de-biased file
        os.remove(debiased_f)
        logger.info(
            "Updating header with original filename and flat field used.")
        fitsutils.update_par(deflatted, "ORIGFILE", os.path.basename(image))
        fitsutils.update_par(deflatted, "FLATFILE", flat)
        # replace the sliced name with the final de-flatted product name
        slice_names[i] = deflatted
    #Moving files to the target directory
    for image in slice_names:
        bkg = get_median_bkg(image)
        fitsutils.update_par(image, "SKYBKG", bkg)
        #Get basic statistics for the image
        nsrc, fwhm, ellip, bkg = sextractor.get_image_pars(image)
        logger.info("Sextractor statistics: nscr %d, fwhm (arcsec) %.2f, ellipticity %.2f" % (nsrc, fwhm, ellip))
        print "Sextractor statistics: nscr %d, fwhm (arcsec) %.2f, ellipticity %.2f" % (
            nsrc, fwhm, ellip)
        # 0.394 arcsec/pixel plate scale converts FWHM to pixels
        dic = {"FWHM": np.round(fwhm, 3), "FWHMPIX": np.round(fwhm / 0.394, 3),
               "NSRC": nsrc, "ELLIP": np.round(ellip, 3)}
        #Update the seeing information from sextractor
        fitsutils.update_pars(image, dic)
    #Compute the zeropoints
    for image in slice_names:
        zeropoint.calibrate_zeropoint(image)
    return slice_names
def floydsautoredu(files,
                   _interactive,
                   _dobias,
                   _doflat,
                   _listflat,
                   _listbias,
                   _listarc,
                   _cosmic,
                   _ext_trace,
                   _dispersionline,
                   liststandard,
                   listatmo,
                   _automaticex,
                   _classify=False,
                   _verbose=False,
                   smooth=1,
                   fringing=1):
    # NOTE(review): this function was re-indented from a whitespace-mangled
    # source; the nesting below is the most consistent reading of the token
    # stream -- confirm against version control before relying on edge cases.
    '''Automatic reduction of a night of FLOYDS spectra.

    Splits each raw frame into blue/red order images, classifies them
    (flat / arc / bias / object vs. standard by coordinate match against
    standard_floyds_mab.txt), then for each object: combines flats,
    selects an arc, rectifies, checks the wavelength calibration,
    fringe/flat-corrects the red arm, flux-calibrates in 2D with a
    sensitivity function, extracts 1D spectra and, for standards,
    computes sensitivity and telluric files.

    Parameters (as used here; several are passed through unused):
        files           -- list of raw FITS file names
        _interactive    -- 'yes'/'no' style string driving prompts
        _listflat/_listarc/liststandard -- reducer-supplied calibration lists
        _cosmic         -- apply cosmic-ray rejection during rectification
        _ext_trace, _dispersionline, _automaticex -- extraction controls

    Returns:
        (outputfile, readme) -- dict {type: {archive file: [products]}}
        and the path of a generated README.
    '''
    import floyds
    import string, re, os, glob, sys, pickle
    from numpy import array, arange, mean, pi, arccos, sin, cos, argmin
    from astropy.io import fits
    from pyraf import iraf
    import datetime
    os.environ["PYRAF_BETA_STATUS"] = "1"
    # make the package directory reachable inside IRAF as 'direc$'
    iraf.set(direc=floyds.__path__[0] + '/')
    _extinctdir = 'direc$standard/extinction/'
    _tel = floyds.util.readkey3(
        floyds.util.readhdr(re.sub('\n', '', files[0])), 'TELID')
    if _tel == 'fts':
        _extinction = 'ssoextinct.dat'
        _observatory = 'sso'
    elif _tel == 'ftn':
        _extinction = 'maua.dat'
        _observatory = 'cfht'
    else:
        sys.exit('ERROR: observatory not recognised')
    dv = floyds.util.dvex()
    scal = pi / 180.
    # load the IRAF packages used below and reset the relevant tasks
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    iraf.twodspec(_doprint=0)
    iraf.longslit(_doprint=0)
    iraf.specred(_doprint=0)
    toforget = ['ccdred.flatcombine', 'ccdred.zerocombine', 'ccdproc',
                'specred.apall', 'longslit.identify', 'longslit.reidentify',
                'specred.standard', 'longslit.fitcoords', 'specred.transform',
                'specred.response']
    for t in toforget:
        iraf.unlearn(t)
    iraf.longslit.dispaxi = 2
    iraf.longslit.mode = 'h'
    iraf.identify.fwidth = 7
    iraf.identify.order = 2
    iraf.specred.dispaxi = 2
    iraf.specred.mode = 'h'
    # ccdproc is used only for overscan+trim here; disable everything else
    iraf.ccdproc.darkcor = 'no'
    iraf.ccdproc.fixpix = 'no'
    iraf.ccdproc.trim = 'no'
    iraf.ccdproc.flatcor = 'no'
    iraf.ccdproc.overscan = 'no'
    iraf.ccdproc.zerocor = 'no'
    iraf.ccdproc.biassec = ''
    iraf.ccdproc.ccdtype = ''
    iraf.ccdred.instrument = "/dev/null"
    if _verbose:
        iraf.ccdred.verbose = 'yes'
        iraf.specred.verbose = 'yes'
    else:
        iraf.specred.verbose = 'no'
        iraf.ccdred.verbose = 'no'
    now = datetime.datetime.now()
    datenow = now.strftime('20%y%m%d%H%M')
    # MJD of 2012-01-01 is 55927; value used only to tag output file names
    MJDtoday = 55928 + (datetime.date.today() -
                        datetime.date(2012, 01, 01)).days
    outputlist = []
    hdra = floyds.util.readhdr(re.sub('\n', '', files[0]))
    _gain = floyds.util.readkey3(hdra, 'gain')
    _rdnoise = floyds.util.readkey3(hdra, 'ron')
    std, rastd, decstd, magstd = floyds.util.readstandard(
        'standard_floyds_mab.txt')
    _naxis2 = hdra.get('NAXIS2')
    _naxis1 = hdra.get('NAXIS1')
    if not _naxis1:
        _naxis1 = 2079
    if not _naxis2:
        # NOTE(review): `hdr0` is not defined yet at this point, and the
        # fallback assigns _naxis1 rather than _naxis2 -- both look like
        # bugs in the original; confirm intent before fixing.
        if not hdr0.get('HDRVER'):
            _naxis1 = 511
        else:
            _naxis1 = 512
    _overscan = '[2049:' + str(_naxis1) + ',1:' + str(_naxis2) + ']'
    _biassecblu = '[380:2048,325:' + str(_naxis2) + ']'
    _biassecred = '[1:1800,1:350]'
    lista = {}        # raw frames split per arm: {'blu': [...], 'red': [...]}
    objectlist = {}   # {type: {(arm, slit): [images]}}
    biaslist = {}
    flatlist = {}     # {(arm, slit): {grpid: [images]}}
    flatlistd = {}
    arclist = {}      # {(arm, slit): {grpid: [images]}}
    max_length = 14   # truncate object names so output names stay short
    # ------- split every raw frame into blue-order and red-order images ----
    for img in files:
        hdr0 = floyds.util.readhdr(img)
        if floyds.util.readkey3(hdr0, 'naxis2') >= 500:
            if 'blu' not in lista:
                lista['blu'] = []
            if 'red' not in lista:
                lista['red'] = []
            # sanitise the object name for use inside a file name
            _object0 = floyds.util.readkey3(hdr0, 'object')
            _object0 = re.sub(':', '', _object0)  # colon
            _object0 = re.sub('/', '', _object0)  # slash
            _object0 = re.sub('\s', '', _object0)  # any whitespace
            _object0 = re.sub('\(', '', _object0)  # open parenthesis
            _object0 = re.sub('\[', '', _object0)  # open square bracket
            _object0 = re.sub('\)', '', _object0)  # close parenthesis
            _object0 = re.sub('\]', '', _object0)  # close square bracket
            _object0 = _object0.replace(r'\t', '')  # Any tab characters
            _object0 = _object0.replace('*', '')  # Any asterisks
            if len(_object0) > max_length:
                _object0 = _object0[:max_length]
            _date0 = floyds.util.readkey3(hdr0, 'date-night')
            _tel = floyds.util.readkey3(hdr0, 'TELID')
            _type = floyds.util.readkey3(hdr0, 'OBSTYPE')
            if not _type:
                _type = floyds.util.readkey3(hdr0, 'imagetyp')
            _slit = floyds.util.readkey3(hdr0, 'slit')
            if _type:
                _type = _type.lower()
                # choose output names by frame type
                if _type in ['sky', 'spectrum', 'expose']:
                    nameoutb = str(_object0) + '_' + _tel + '_' + str(
                        _date0) + '_blue_' + str(_slit) + '_' + str(MJDtoday)
                    nameoutr = str(_object0) + '_' + _tel + '_' + str(
                        _date0) + '_red_' + str(_slit) + '_' + str(MJDtoday)
                elif _type in ['lamp', 'arc', 'l']:
                    nameoutb = 'arc_' + str(_object0) + '_' + _tel + '_' + str(
                        _date0) + '_blue_' + str(_slit) + '_' + str(MJDtoday)
                    nameoutr = 'arc_' + str(_object0) + '_' + _tel + '_' + str(
                        _date0) + '_red_' + str(_slit) + '_' + str(MJDtoday)
                elif _type in ['flat', 'f', 'lampflat', 'lamp-flat']:
                    nameoutb = 'flat_' + str(_object0) + '_' + _tel + '_' + str(
                        _date0) + '_blue_' + str(_slit) + '_' + str(MJDtoday)
                    nameoutr = 'flat_' + str(_object0) + '_' + _tel + '_' + str(
                        _date0) + '_red_' + str(_slit) + '_' + str(MJDtoday)
                else:
                    nameoutb = str(_type.lower(
                    )) + '_' + str(_object0) + '_' + _tel + '_' + str(
                        _date0) + '_blue_' + str(_slit) + '_' + str(MJDtoday)
                    nameoutr = str(_type.lower(
                    )) + '_' + str(_object0) + '_' + _tel + '_' + str(
                        _date0) + '_red_' + str(_slit) + '_' + str(MJDtoday)
                bimg = floyds.util.name_duplicate(img, nameoutb, '')
                rimg = floyds.util.name_duplicate(img, nameoutr, '')
                ####
                floyds.util.delete(bimg)
                floyds.util.delete(rimg)
                iraf.imcopy(img, bimg, verbose='no')
                iraf.imcopy(img, rimg, verbose='no')
                # drop stale section keywords before ccdproc re-trims
                aaa = iraf.hedit(bimg, 'CCDSEC', delete='yes', update='yes',
                                 verify='no', Stdout=1)
                aaa = iraf.hedit(bimg, 'TRIMSEC', delete='yes', update='yes',
                                 verify='no', Stdout=1)
                aaa = iraf.hedit(rimg, 'CCDSEC', delete='yes', update='yes',
                                 verify='no', Stdout=1)
                aaa = iraf.hedit(rimg, 'TRIMSEC', delete='yes', update='yes',
                                 verify='no', Stdout=1)
                # overscan-subtract and trim each arm to its own section
                iraf.ccdproc(bimg, output='', overscan="yes", trim="yes",
                             zerocor='no', flatcor='no', zero='', ccdtype='',
                             fixpix='no', trimsec=_biassecblu,
                             biassec=_overscan, readaxi='line', Stdout=1)
                iraf.ccdproc(rimg, output='', overscan="yes", trim="yes",
                             zerocor='no', flatcor='no', zero='', ccdtype='',
                             fixpix='no', trimsec=_biassecred,
                             biassec=_overscan, readaxi='line', Stdout=1)
                floyds.util.updateheader(bimg, 0,
                                         {'GRISM': ['blu', ' blue order']})
                floyds.util.updateheader(rimg, 0,
                                         {'GRISM': ['red', ' blue order']})
                floyds.util.updateheader(
                    bimg, 0, {'arcfile': [img, 'file name in the archive']})
                floyds.util.updateheader(
                    rimg, 0, {'arcfile': [img, 'file name in the archive']})
                lista['blu'].append(bimg)
                lista['red'].append(rimg)
            else:
                print 'warning type not defined'
    # ------- classify the split frames per arm ------------------------------
    for arm in lista.keys():
        for img in lista[arm]:
            print img
            hdr = floyds.util.readhdr(img)
            _type = floyds.util.readkey3(hdr, 'OBSTYPE')
            if _type == 'EXPOSE':
                _type = floyds.util.readkey3(hdr, 'imagetyp')
                if not _type:
                    _type = 'EXPOSE'
            if _type == 'EXPOSE':
                print 'warning obstype still EXPOSE, are this old data ? run manually floydsfixheader'
            _slit = floyds.util.readkey3(hdr, 'slit')
            _grpid = floyds.util.readkey3(hdr, 'grpid')
            if _type.lower() in ['flat', 'f', 'lamp-flat', 'lampflat']:
                if (arm, _slit) not in flatlist:
                    flatlist[arm, _slit] = {}
                if _grpid not in flatlist[arm, _slit]:
                    flatlist[arm, _slit][_grpid] = [img]
                else:
                    flatlist[arm, _slit][_grpid].append(img)
            elif _type.lower() in ['lamp', 'l', 'arc']:
                if (arm, _slit) not in arclist:
                    arclist[arm, _slit] = {}
                if _grpid not in arclist[arm, _slit]:
                    arclist[arm, _slit][_grpid] = [img]
                else:
                    arclist[arm, _slit][_grpid].append(img)
            elif _type in ['bias', 'b']:
                if arm not in biaslist:
                    biaslist[arm] = []
                biaslist[arm].append(img)
            elif _type.lower() in ['sky', 's', 'spectrum']:
                # decimal degrees if possible, else parse sexagesimal
                try:
                    _ra = float(floyds.util.readkey3(hdr, 'RA'))
                    _dec = float(floyds.util.readkey3(hdr, 'DEC'))
                except:
                    ra00 = string.split(floyds.util.readkey3(hdr, 'RA'), ':')
                    ra0, ra1, ra2 = float(ra00[0]), float(ra00[1]), float(
                        ra00[2])
                    _ra = ((ra2 / 60. + ra1) / 60. + ra0) * 15.
                    dec00 = string.split(floyds.util.readkey3(hdr, 'DEC'), ':')
                    dec0, dec1, dec2 = float(dec00[0]), float(dec00[1]), float(
                        dec00[2])
                    if '-' in str(dec0):
                        _dec = (-1) * ((dec2 / 60. + dec1) / 60. +
                                       ((-1) * dec0))
                    else:
                        _dec = (dec2 / 60. + dec1) / 60. + dec0
                # angular distance (arcsec) to every catalogued standard
                dd = arccos(
                    sin(_dec * scal) * sin(decstd * scal) +
                    cos(_dec * scal) * cos(decstd * scal) * cos(
                        (_ra - rastd) * scal)) * ((180 / pi) * 3600)
                if _verbose:
                    print _ra, _dec
                    print std[argmin(dd)], min(dd)
                # within 5200 arcsec of a catalogued standard -> it IS one
                if min(dd) < 5200:
                    _typeobj = 'std'
                else:
                    _typeobj = 'obj'
                if min(dd) < 5200:
                    floyds.util.updateheader(
                        img, 0, {'stdname': [std[argmin(dd)], '']})
                    floyds.util.updateheader(
                        img, 0, {'magstd': [float(magstd[argmin(dd)]), '']})
                if _typeobj not in objectlist:
                    objectlist[_typeobj] = {}
                if (arm, _slit) not in objectlist[_typeobj]:
                    objectlist[_typeobj][arm, _slit] = [img]
                else:
                    objectlist[_typeobj][arm, _slit].append(img)
    if _verbose:
        print 'object'
        print objectlist
        print 'flat'
        print flatlist
        print 'bias'
        print biaslist
        print 'arc'
        print arclist
    if liststandard and 'std' in objectlist.keys():
        print 'external standard, raw standard not used'
        del objectlist['std']
    sens = {}
    outputfile = {}   # {type: {archive file: [reduction products]}}
    atmo = {}
    # ------- main reduction loop: one pass per object image -----------------
    for tpe in objectlist:
        if tpe not in outputfile:
            outputfile[tpe] = {}
        for setup in objectlist[tpe]:
            if setup not in sens:
                sens[setup] = []
            print '\n### setup= ', setup, '\n### objects= ', objectlist[tpe][
                setup], '\n'
            for img in objectlist[tpe][setup]:
                print '\n\n### next object= ', img, ' ', floyds.util.readkey3(
                    floyds.util.readhdr(img), 'object'), '\n'
                hdr = floyds.util.readhdr(img)
                archfile = floyds.util.readkey3(hdr, 'arcfile')
                _gain = floyds.util.readkey3(hdr, 'gain')
                _rdnoise = floyds.util.readkey3(hdr, 'ron')
                _grism = floyds.util.readkey3(hdr, 'grism')
                _grpid = floyds.util.readkey3(hdr, 'grpid')
                if archfile not in outputfile[tpe]:
                    outputfile[tpe][archfile] = []
                #####################  flat  ###############
                if _listflat:
                    flatgood = _listflat  # flat list from reducer
                elif setup in flatlist:
                    if _grpid in flatlist[setup]:
                        print '\n###FLAT WITH SAME GRPID'
                        flatgood = flatlist[setup][
                            _grpid]  # flat in the raw data
                    else:
                        flatgood = []
                        for _grpid0 in flatlist[setup].keys():
                            for ii in flatlist[setup][_grpid0]:
                                flatgood.append(ii)
                else:
                    flatgood = []
                if len(flatgood) != 0:
                    if len(flatgood) > 1:
                        f = open('_oflatlist', 'w')
                        for fimg in flatgood:
                            print fimg
                            f.write(fimg + '\n')
                        f.close()
                        floyds.util.delete('flat' + img)
                        iraf.ccdred.flatcombine('"@_oflatlist"',
                                                output='flat' + img,
                                                combine='average',
                                                reject='none',
                                                ccdtype=' ',
                                                rdnoise=_rdnoise,
                                                gain=_gain,
                                                process='no',
                                                Stdout=1)
                        floyds.util.delete('_oflatlist')
                        flatfile = 'flat' + img
                    elif len(flatgood) == 1:
                        os.system('cp ' + flatgood[0] + ' flat' + img)
                        flatfile = 'flat' + img
                else:
                    flatfile = ''
                ##########################   find arcfile     #######################
                arcfile = ''
                if _listarc:
                    arcfile = [floyds.util.searcharc(img, _listarc)[0]
                               ][0]  # take arc from list
                if not arcfile and setup in arclist.keys():
                    if _grpid in arclist[setup]:
                        print '\n###ARC WITH SAME GRPID'
                        arcfile = arclist[setup][
                            _grpid]  # flat in the raw data
                    else:
                        arcfile = []
                        for _grpid0 in arclist[setup].keys():
                            for ii in arclist[setup][_grpid0]:
                                arcfile.append(ii)
                if arcfile:
                    if len(arcfile) > 1:  # more than one arc available
                        print arcfile
                        # _arcclose=floyds.util.searcharc(imgex,arcfile)[0]   # take the closest in time
                        _arcclose = floyds.sortbyJD(arcfile)[
                            -1]  # take the last arc of the sequence
                        if _interactive.upper() in ['YES', 'Y']:
                            for ii in floyds.floydsspecdef.sortbyJD(arcfile):
                                print '\n### ', ii
                            arcfile = raw_input(
                                '\n### more than one arcfile available, which one to use ['
                                + str(_arcclose) + '] ? ')
                            if not arcfile:
                                arcfile = _arcclose
                        else:
                            arcfile = _arcclose
                    else:
                        arcfile = arcfile[0]
                else:
                    print '\n### Warning: no arc found'
                ###################################################################   rectify
                if setup[0] == 'red':
                    fcfile = floyds.__path__[
                        0] + '/standard/ident/fcrectify_' + _tel + '_red'
                    fcfile1 = floyds.__path__[
                        0] + '/standard/ident/fcrectify1_' + _tel + '_red'
                    print fcfile
                else:
                    fcfile = floyds.__path__[
                        0] + '/standard/ident/fcrectify_' + _tel + '_blue'
                    fcfile1 = floyds.__path__[
                        0] + '/standard/ident/fcrectify1_' + _tel + '_blue'
                    print fcfile
                print img, arcfile, flatfile
                img0 = img
                # record the pre-rectification products
                if img and img not in outputfile[tpe][archfile]:
                    outputfile[tpe][archfile].append(img)
                if arcfile and arcfile not in outputfile[tpe][archfile]:
                    outputfile[tpe][archfile].append(arcfile)
                if flatfile and flatfile not in outputfile[tpe][archfile]:
                    outputfile[tpe][archfile].append(flatfile)
                img, arcfile, flatfile = floyds.floydsspecdef.rectifyspectrum(
                    img, arcfile, flatfile, fcfile, fcfile1, 'no', _cosmic)
                # record the rectified products
                if img and img not in outputfile[tpe][archfile]:
                    outputfile[tpe][archfile].append(img)
                if arcfile and arcfile not in outputfile[tpe][archfile]:
                    outputfile[tpe][archfile].append(arcfile)
                if flatfile and flatfile not in outputfile[tpe][archfile]:
                    outputfile[tpe][archfile].append(flatfile)
                ###################################################################   check wavecalib
                if tpe == 'std' or floyds.util.readkey3(
                        floyds.util.readhdr(img), 'exptime') < 300:
                    if setup[0] == 'red':
                        print '\n### check standard wave calib'
                        data, hdr = fits.getdata(img, 0, header=True)
                        y = data.mean(1)
                        import numpy as np
                        # use the trace only if it falls inside rows 16..79
                        if np.argmax(y) < 80 and np.argmax(y) > 15:
                            y2 = data[np.argmax(y) - 3:np.argmax(y) +
                                      3].mean(0)
                            yy2 = data[np.argmax(y) - 9:np.argmax(y) -
                                       3].mean(0)
                            floyds.util.delete('_std.fits')
                            fits.writeto('_std.fits', np.float32(y2 - yy2),
                                         hdr)
                            shift = floyds.floydsspecdef.checkwavestd(
                                '_std.fits', _interactive, 2)
                            zro = hdr['CRVAL1']
                            floyds.util.updateheader(
                                img, 0, {'CRVAL1': [zro + int(shift), '']})
                            floyds.util.updateheader(
                                img, 0, {'shift': [float(shift), '']})
                            floyds.util.delete('_std.fits')
                        else:
                            print 'object not found'
                    else:
                        print '\n### warning check in wavelength not possible for short exposure in the blu range '
                else:
                    print '\n### check object wave calib'
                    _skyfile = floyds.__path__[
                        0] + '/standard/ident/sky_' + setup[0] + '.fits'
                    data, hdr = fits.getdata(img, 0, header=True)
                    y = data.mean(1)
                    import numpy as np
                    if np.argmax(y) < 80 and np.argmax(y) > 15:
                        # sky rows above and below the object trace
                        yy1 = data[10:np.argmax(y) - 9].mean(0)
                        yy2 = data[np.argmax(y) + 9:-10].mean(0)
                        floyds.util.delete('_sky.fits')
                        fits.writeto('_sky.fits', np.float32(yy1 + yy2), hdr)
                        shift = floyds.floydsspecdef.checkwavelength_obj(
                            '_sky.fits', _skyfile, _interactive,
                            usethirdlayer=False)
                        floyds.util.delete('_sky.fits')
                        zro = hdr['CRVAL1']
                        floyds.util.updateheader(
                            img, 0, {'CRVAL1': [zro + int(shift), '']})
                        floyds.util.updateheader(img, 0,
                                                 {'shift': [float(shift), '']})
                    else:
                        print 'object not found'
                ####################################################  flat field
                if img and flatfile and setup[0] == 'red':
                    imgn = 'n' + img
                    hdr1 = floyds.readhdr(img)
                    hdr2 = floyds.readhdr(flatfile)
                    _grpid1 = floyds.util.readkey3(hdr1, 'grpid')
                    _grpid2 = floyds.util.readkey3(hdr2, 'grpid')
                    if _grpid1 == _grpid2:
                        print flatfile, img, setup[0]
                        imgn = floyds.fringing_classicmethod2(
                            flatfile, img, 'no', '*', 15, setup[0])
                    else:
                        print 'Warning flat not the same OB'
                        # fall back to the classic method: extract, build a
                        # fringing mask from the extracted flat, then
                        # apflatten the 2D flat and apply it
                        imgex = floyds.floydsspecdef.extractspectrum(
                            img, dv, _ext_trace, _dispersionline,
                            _interactive, tpe, automaticex=_automaticex)
                        floyds.delete('flat' + imgex)
                        iraf.specred.apsum(flatfile, output='flat' + imgex,
                                           referen=img, interac='no',
                                           find='no', recente='no',
                                           resize='no', edit='no', trace='no',
                                           fittrac='no', extract='yes',
                                           extras='no', review='no',
                                           backgro='none')
                        fringingmask = floyds.normflat('flat' + imgex)
                        print '\n### fringing correction'
                        print imgex, fringingmask
                        imgex, scale, shift = floyds.correctfringing_auto(
                            imgex, fringingmask)  # automatic correction
                        shift = int(
                            .5 + float(shift) /
                            3.5)  # shift from correctfringing_auto in Angstrom
                        print '\n##### flat scaling: ', str(scale), str(shift)
                        ########################################################
                        datax, hdrx = fits.getdata(flatfile, 0, header=True)
                        xdim = hdrx['NAXIS1']
                        ydim = hdrx['NAXIS2']
                        iraf.specred.apedit.nsum = 15
                        iraf.specred.apedit.width = 100.
                        iraf.specred.apedit.line = 1024
                        iraf.specred.apfind.minsep = 20.
                        iraf.specred.apfind.maxsep = 1000.
                        iraf.specred.apresize.bkg = 'no'
                        iraf.specred.apresize.ylevel = 0.5
                        iraf.specred.aptrace.nsum = 10
                        iraf.specred.aptrace.step = 10
                        iraf.specred.aptrace.nlost = 10
                        floyds.util.delete('n' + flatfile)
                        floyds.util.delete('norm.fits')
                        floyds.util.delete('n' + img)
                        floyds.util.delete(
                            re.sub('.fits', 'c.fits', flatfile))
                        iraf.imcopy(flatfile + '[500:' + str(xdim) + ',*]',
                                    re.sub('.fits', 'c.fits', flatfile),
                                    verbose='no')
                        # flat/flat = unity image used as the norm container
                        iraf.imarith(flatfile, '/', flatfile, 'norm.fits',
                                     verbose='no')
                        flatfile = re.sub('.fits', 'c.fits', flatfile)
                        floyds.util.delete('n' + flatfile)
                        iraf.unlearn(iraf.specred.apflatten)
                        floyds.floydsspecdef.aperture(flatfile)
                        iraf.specred.apflatten(flatfile, output='n' + flatfile,
                                               interac=_interactive,
                                               find='no', recenter='no',
                                               resize='no', edit='no',
                                               trace='no', fittrac='no',
                                               fitspec='no', flatten='yes',
                                               aperture='', pfit='fit2d',
                                               clean='no',
                                               function='legendre', order=15,
                                               sample='*', mode='ql')
                        iraf.imcopy('n' + flatfile,
                                    'norm.fits[500:' + str(xdim) + ',*]',
                                    verbose='no')
                        floyds.util.delete('n' + flatfile)
                        floyds.util.delete('n' + img)
                        iraf.imrename('norm.fits', 'n' + flatfile,
                                      verbose='no')
                        imgn = floyds.floydsspecdef.applyflat(
                            img, 'n' + flatfile, 'n' + img, scale, shift)
                else:
                    imgn = ''
                if imgn and imgn not in outputfile[tpe][archfile]:
                    outputfile[tpe][archfile].append(imgn)
                ###################################################  2D flux calib
                hdr = floyds.util.readhdr(img)
                _sens = ''
                if liststandard:
                    _sens = floyds.util.searchsens(
                        img, liststandard)[0]  # search in the list from reducer
                if not _sens:
                    try:
                        _sens = floyds.util.searchsens(
                            img, sens[setup])[0]  # search in the reduced data
                    except:
                        _sens = floyds.util.searchsens(
                            img, '')[0]  # search in tha archive
                if _sens:
                    if _sens[0] == '/':
                        os.system('cp ' + _sens + ' .')
                        _sens = string.split(_sens, '/')[-1]
                    imgd = fluxcalib2d(img, _sens)
                    if imgn:
                        imgdn = fluxcalib2d(imgn, _sens)
                    else:
                        imgdn = ''
                    if _sens not in outputfile[tpe][archfile]:
                        outputfile[tpe][archfile].append(_sens)
                    else:
                        # NOTE(review): this branch (sens already recorded ->
                        # discard imgdn) follows the token order of the
                        # mangled source but looks suspicious; confirm
                        # against version control.
                        imgdn = ''
                        print '\n### do 2D calibration'
                else:
                    imgd = ''
                    imgdn = ''
                ################    extraction         ####################################
                if imgdn:
                    try:
                        imgdnex = floyds.floydsspecdef.extractspectrum(
                            imgdn, dv, _ext_trace, _dispersionline,
                            _interactive, tpe, automaticex=_automaticex)
                    except Exception as e:
                        print 'failed to extract', imgdn
                        print e
                        imgdnex = ''
                else:
                    imgdnex = ''
                if imgd:
                    try:
                        imgdex = floyds.floydsspecdef.extractspectrum(
                            imgd, dv, _ext_trace, _dispersionline,
                            _interactive, tpe, automaticex=_automaticex)
                    except Exception as e:
                        print 'failed to extract', imgd
                        print e
                        imgdex = ''
                else:
                    imgdex = ''
                if imgd and imgd not in outputfile[tpe][archfile]:
                    outputfile[tpe][archfile].append(imgd)
                if imgdn and imgdn not in outputfile[tpe][archfile]:
                    outputfile[tpe][archfile].append(imgdn)
                if imgdnex and imgdnex not in outputfile[tpe][archfile]:
                    outputfile[tpe][archfile].append(imgdnex)
                if imgdex and imgdex not in outputfile[tpe][archfile]:
                    outputfile[tpe][archfile].append(imgdex)
                # for standards: extract the (flat-corrected) counts spectrum
                # and compute sensitivity / telluric files
                if tpe == 'std':
                    if imgn:
                        try:
                            imgnex = floyds.floydsspecdef.extractspectrum(
                                imgn, dv, _ext_trace, _dispersionline,
                                _interactive, tpe, automaticex=_automaticex)
                        except Exception as e:
                            print 'failed to extract', imgn
                            print e
                            imgnex = ''
                    elif img:
                        try:
                            imgnex = floyds.floydsspecdef.extractspectrum(
                                img, dv, _ext_trace, _dispersionline,
                                _interactive, tpe, automaticex=_automaticex)
                        except Exception as e:
                            print 'failed to extract', img
                            print e
                            imgnex = ''
                    if imgnex:
                        hdrs = floyds.util.readhdr(imgnex)
                        _tel = floyds.util.readkey3(hdrs, 'TELID')
                        try:
                            _outputsens2 = 'sens_' + _tel + '_' + str(
                                floyds.util.readkey3(hdrs, 'date-night')
                            ) + '_' + str(
                                floyds.util.readkey3(hdrs, 'grism')) + \
                                '_' + re.sub('.dat', '', floyds.util.readkey3(
                                    hdrs, 'stdname')) + '_' + str(MJDtoday)
                        except:
                            # NOTE(review): `standardfile` is not defined in
                            # this scope -- this error path would itself raise
                            # a NameError; confirm.
                            sys.exit(
                                'Error: missing header -stdname- in standard '
                                + str(standardfile) + '  ')
                        print '\n### compute sensitivity function and atmofile'
                        if setup[0] == 'red':
                            atmofile = floyds.floydsspecdef.telluric_atmo(
                                imgnex)
                            if atmofile and atmofile not in outputfile[tpe][
                                    archfile]:
                                outputfile[tpe][archfile].append(atmofile)
                            stdusedclean = re.sub('_ex.fits', '_clean.fits',
                                                  imgnex)
                            floyds.util.delete(stdusedclean)
                            _function = 'spline3'
                            # divide the telluric model out of the standard
                            iraf.specred.sarith(input1=imgnex, op='/',
                                                input2=atmofile,
                                                output=stdusedclean,
                                                format='multispec')
                            try:
                                _outputsens2 = floyds.floydsspecdef.sensfunction(
                                    stdusedclean, _outputsens2, _function, 8,
                                    _interactive)
                            except:
                                print 'Warning: problem computing sensitivity function'
                                _outputsens2 = ''
                            if setup not in atmo:
                                atmo[setup] = [atmofile]
                            else:
                                atmo[setup].append(atmofile)
                        else:
                            _function = 'spline3'
                            try:
                                _outputsens2 = floyds.floydsspecdef.sensfunction(
                                    imgnex, _outputsens2, _function, 12,
                                    _interactive, '3400:4700')  # ,3600:4300')
                            except:
                                print 'Warning: problem computing sensitivity function'
                                _outputsens2 = ''
                        if _outputsens2 and _outputsens2 not in outputfile[
                                tpe][archfile]:
                            outputfile[tpe][archfile].append(_outputsens2)
    ###################################################
    # merge blue and red extracted spectra of each object, then tidy the
    # product lists
    if 'obj' in outputfile:
        for imm in outputfile['obj']:
            lista = []
            tt_red = ''
            ntt_red = ''
            tt_blue = ''
            for f in outputfile['obj'][imm]:
                if '_ex.fits' in f and '_blue_' in f:
                    tt_blue = f
                elif '_ex.fits' in f and f[:3] == 'ntt':
                    ntt_red = f
                elif '_ex.fits' in f and f[:2] == 'tt':
                    tt_red = f
                else:
                    lista.append(f)
            merged = ntt_red.replace('_red_', '_merge_')
            if tt_blue and ntt_red:
                floyds.floydsspecdef.combspec2(tt_blue, ntt_red, merged,
                                               scale=True, num=None)
                if os.path.isfile(merged):
                    lista.append(merged)
                    floyds.util.delete(tt_blue)
                    floyds.util.delete(tt_red)
                    floyds.util.delete(ntt_red)
            else:
                if tt_blue:
                    lista.append(tt_blue)
                if tt_red:
                    lista.append(tt_red)
                if ntt_red:
                    lista.append(ntt_red)
            outputfile['obj'][imm] = lista
    readme = floyds.floydsspecauto.writereadme()
    return outputfile, readme
def makeillumination(lista, flatfield):  # ,outputfile,illum_frame):
    # NOTE(review): re-indented from a whitespace-mangled source; confirm the
    # nesting (especially the interactive while-loop) against version control.
    '''Build an illumination-correction frame from standard-star frames.

    Each frame in `lista` is crosstalk-corrected, flat-fielded with
    `flatfield` and sky-subtracted; the user then identifies the standard
    star on the first frame, aperture photometry is done at the star's
    (dither-shifted) position on every frame, and a 2D polynomial surface
    is fitted to the measured fluxes and normalised to produce the
    illumination frame.

    Interactive: uses IRAF display/imexamine and raw_input prompts.
    Side effects: writes/removes many scratch files in the current
    directory (templist.lst, tmpone.coo, pippo.*.mag, magnitudini, ...).

    Returns the name of the written illumination FITS frame.
    '''
    import os, glob, string, re
    from astropy.io import fits as pyfits
    import ntt
    from ntt.util import readhdr, readkey3, delete, display_image, defsex, name_duplicate, correctcard
    from numpy import compress, array, argmax, argmin, min, argsort, float32
    import datetime
    # days since 2012-01-01 offset to MJD (55927 = MJD of 2012-01-01)
    MJDtoday = 55927 + (datetime.date.today() -
                        datetime.date(2012, 01, 01)).days
    _date = readkey3(readhdr(lista[0]), 'date-night')
    _filter = readkey3(readhdr(lista[0]), 'filter')
    illum_frame = name_duplicate(
        lista[0], 'illum_' + _date + '_' + _filter + '_' + str(MJDtoday), '')
    from pyraf import iraf
    iraf.images(_doprint=0)
    iraf.imutil(_doprint=0)
    iraf.utilities(_doprint=0)
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    iraf.digiphot(_doprint=0)
    iraf.daophot(_doprint=0)
    iraf.generic(_doprint=0)
    toforget = ['digiphot.daophot', 'imutil.imarith', 'image',
                'utilities.surfit']
    for t in toforget:
        iraf.unlearn(t)
    n = len(lista)
    #   start loop to read image names from the input file
    lista1 = []
    iraf.ccdred.verbose = 'no'
    ff = open('templist.lst', 'w')
    for i in range(0, len(lista)):
        ff.write('C' + lista[i] + '\n')
        delete('C' + lista[i])
        delete('C' + re.sub('.fits', '_sub.fits', lista[i]))
        # crosstalk correction, then flat-field with the supplied flat
        ntt.sofiphotredudef.crosstalk(lista[i], 'C' + lista[i])
        iraf.noao.imred.ccdred.ccdproc('C' + lista[i], output='',
                                       overscan="no", trim="yes", ccdtype='',
                                       darkcor='no', fixpix='no',
                                       zerocor="no", flatcor='yes',
                                       illumco='no',
                                       trimsec='[1:1024,1:1007]', biassec='',
                                       flat=flatfield, illum='')
        correctcard('C' + lista[i])
        lista1.append('C' + lista[i])
    ff.close()
    print '\n### prereducing STD frames to compute illumination correction ........'
    lista2, skyfile = ntt.sofiphotredudef.skysub(
        lista1, readkey3(readhdr(lista1[0]), 'ron'),
        readkey3(readhdr(lista1[0]), 'gain'), True)
    lista2 = ntt.sofiphotredudef.sortbyJD(lista2)
    # let the user mark the standard star on the first frame
    print '\n### use x on the star and q to continue....'
    display_image(lista2[0], 2, '', '', False)
    delete('tmpone.coo')
    iraf.image.tv.imexamine(lista2[0], 2, logfile='tmpone.coo', keeplog='yes',
                            xformat='', yformat='', wcs='logical')
    iraf.tvmark(2, 'tmpone.coo', mark="circle", number='yes', label='no',
                radii=8, nxoffse=5, nyoffse=5, color=204, txsize=2)
    xycoo = iraf.proto.fields('tmpone.coo', '1,2', Stdout=1)
    x0, y0 = string.split(xycoo[0])
    x0 = float(x0)
    y0 = float(y0)
    # cumulative dither offsets of the first frame (header keywords)
    xcum0 = readkey3(readhdr(lista2[0]), 'xcum')
    ycum0 = readkey3(readhdr(lista2[0]), 'ycum')
    iraf.digiphot(_doprint=0)
    iraf.daophot(_doprint=0)
    iraf.noao.digiphot.daophot.datapars.datamin = -1000
    iraf.noao.digiphot.daophot.datapars.datamax = 60000
    iraf.noao.digiphot.daophot.daopars.function = 'gauss'
    iraf.noao.digiphot.daophot.photpars.zmag = 0
    namesex = defsex('default.sex')
    for i in range(0, len(lista2)):
        j = i + 1
        xcum = readkey3(readhdr(lista2[i]), 'xcum')
        ycum = readkey3(readhdr(lista2[i]), 'ycum')
        # predicted star position on this frame from the dither offsets
        xx = x0 - xcum0 + xcum
        yy = y0 - ycum0 + ycum
        # sex objects
        os.system('sex ' + lista2[i] + ' -c ' + namesex + '> _logsex')
        delete('_logsex')
        xpix = iraf.proto.fields('detections.cat', fields='2', Stdout=1)
        ypix = iraf.proto.fields('detections.cat', fields='3', Stdout=1)
        cm = iraf.proto.fields('detections.cat', fields='4', Stdout=1)
        cm = compress((array(xpix) != ''), array(cm, float))
        ypix = compress((array(xpix) != ''), array(ypix, float))
        xpix = compress((array(xpix) != ''), array(xpix, float))
        # keep at most the 300 brightest detections
        if len(xpix) > 300:
            num = 300
        else:
            num = len(xpix) - 1
        xpix = xpix[argsort(cm)][0:num]
        ypix = ypix[argsort(cm)][0:num]
        # detection closest to the predicted position
        distance = (ypix - yy)**2 + (xpix - xx)**2
        xx1, yy1 = xpix[argmin(distance)], ypix[argmin(distance)]
        f = open('tmpone.coo', 'w')
        f.write(str(xx1) + ' ' + str(yy1) + '\n')
        f.close()
        display_image(lista2[i], 1, '', '', False)
        iraf.tvmark(1, 'tmpone.coo', mark="circle", number='yes', label='no',
                    radii=8, nxoffse=5, nyoffse=5, color=204, txsize=2)
        answ = 'n'
        # loop until photometry on a confirmed position succeeds
        while answ != 'y':
            answ = raw_input('selected the right one [[y]/n] ?')
            if not answ:
                answ = 'y'
            if answ in ['y', 'YES', 'yes', 'Y']:
                print lista2[i]
                delete('pippo.' + str(j) + '.mag')
                gggg = iraf.digiphot.daophot.phot(
                    lista2[i], "tmpone.coo",
                    output="pippo." + str(j) + ".mag", verify='no',
                    interac='no', Stdout=1)
                try:
                    # a parsable flux field means photometry succeeded
                    float(string.split(gggg[0])[3])
                    answ = 'y'
                except:
                    print '\n### warning'
                    answ = 'n'
            else:
                print '\n### select the std star'
                display_image(lista2[i], 1, '', '', False)
                iraf.image.tv.imexamine(lista2[
                    i], 1, logfile='tmpone.coo', keeplog='yes', xformat='',
                    yformat='', wcs='logical')
                xycoo = iraf.proto.fields('tmpone.coo', '1,2', Stdout=1)
                x2, y2 = string.split(xycoo[0])
                f = open('tmpone.coo', 'w')
                f.write(str(x2) + ' ' + str(y2) + '\n')
                f.close()
                delete('pippo.' + str(j) + '.mag')
                print '######  new selection ' + str(x2), str(y2)
                gggg = iraf.digiphot.daophot.phot(
                    lista2[i], "tmpone.coo",
                    output='pippo.' + str(j) + '.mag', verify='no',
                    interac='no', Stdout=1)
                try:
                    float(string.split(gggg[0])[3])
                    answ = 'y'
                except:
                    print '\n### warning'
                    answ = 'n'
    # collect x, y, flux of the star on every frame
    os.system('ls pippo.*.mag > tempmag.lst')
    tmptbl0 = iraf.txdump(textfile="@tempmag.lst",
                          fields="XCENTER,YCENTER,FLUX", expr='yes', Stdout=1)
    ff = open('magnitudini', 'w')
    for i in tmptbl0:
        ff.write(i + '\n')
    ff.close()
    #   delete the temporary images and files
    delete("temp*.fits")
    delete('temp*.lst')
    delete(illum_frame)
    print '\n### fitting the illumination surface...'
    aaa = iraf.utilities.surfit('magnitudini', image=illum_frame,
                                function="polynomial", xorder=2, yorder=2,
                                xterms="full", ncols=1024, nlines=1024,
                                Stdout=1)
    iraf.noao.imred.generic.normalize(illum_frame)
    correctcard(lista[0])
    # re-write the fitted surface with the first frame's header
    data, hdr = pyfits.getdata(illum_frame, 0, header=True)
    data0, hdr0 = pyfits.getdata(lista[0], 0, header=True)
    delete(illum_frame)
    pyfits.writeto(illum_frame, float32(data), hdr0)
    flatfield0 = string.split(flatfield, '/')[-1]
    ntt.util.updateheader(illum_frame, 0,
                          {'MKILLUM': [flatfield0, 'flat field']})
    display_image(illum_frame, 1, '', '', False)
    for i in range(0, len(lista)):  # in lista:
        img = lista[i]
        delete('pippo.' + str(i) + '.mag')
        delete('C' + img)
        delete('C' + re.sub('.fits', '_sky.fits', img))
#    delete('C*.fits.mag.1')
#    iraf.hedit(illum_frame,'MKILLUM','Illum. corr. created '+flatfield,add='yes',update='yes',verify='no')
    return illum_frame
def coroverbiastrim(lstfile):
    '''Overscan/bias-correct then trim every frame listed in lstfile.

    Pass 1 reads extension [1] of each listed frame, applies overscan and
    zero-level ('Zero') correction and writes a 'bo'-prefixed copy.
    Pass 2 trims those copies into 'tbo'-prefixed frames.
    '''
    # load the IRAF packages providing ccdproc
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    x1, x2, y1, y2 = get_trim_sec()
    # parameters shared by both ccdproc passes
    shared = dict(ccdtype='', max_cache=0, noproc=False,
                  fixpix=False, darkcor=False, flatcor=False,
                  illumcor=False, fringecor=False, readcor=False,
                  scancor=False, readaxis='line', fixfile='',
                  biassec='[5:45,%s:%s]' % (y1, y2),
                  trimsec='[%s:%s,%s:%s]' % (x1, x2, y1, y2),
                  zero='Zero', dark='', flat='', illum='', fringe='',
                  minreplace=1.0, scantype='shortscan', nscan=1,
                  interactive=False, function='chebyshev', order=1,
                  sample='*', naverage=1, niterate=1,
                  low_reject=3.0, high_reject=3.0, grow=1.0)
    # pass 1: overscan + zero correction on extension [1] -> 'bo' frames
    iraf.ccdproc(images='@' + lstfile + '//[1]',
                 output='%bo%bo%@' + lstfile,
                 overscan=True, trim=False, zerocor=True, **shared)
    # pass 2: trim the corrected frames -> 'tbo' frames
    iraf.ccdproc(images='%bo%bo%@' + lstfile,
                 output='%tbo%tbo%@' + lstfile,
                 overscan=False, trim=True, zerocor=False, **shared)
    # flush IRAF's process cache so subsequent tasks see fresh parameters
    iraf.flpr()
--------- """ import os import sys import pyfits import numpy as np #Load the IRAF packages we'll need try: current_dir = os.getcwd() if os.getlogin() == 'Arthur': os.chdir('/Users/Arthur/Ureka/iraf/local') from pyraf import iraf os.chdir(current_dir) iraf.imred(_doprint=0) iraf.hydra(_doprint=0) except Exception as e: print "Failure: could not find pyraf/iraf" sys.exit(1) if os.getlogin() == 'Arthur': APIDTABLE = '/Users/Arthur/Documents/School/MetaPak/gradpak_sizes.iraf' else: APIDTABLE = '/usr/users/eigenbrot/research/Pak/gradpak_sizes.iraf' def scale_images(hdulist): """Take in a list of fits HDUS and scale the data in all of them to the exposure time of the first HDU I think this function is no longer used, so I won't document it. """
import pyraf
from pyraf import iraf
import astropy.io.fits as pyfits
import astropy.coordinates
from scipy import optimize
import numpy as np
import glob, os, shutil, re
import iqpkg
from iqutils import *

# Necessary packages
iraf.images()
iraf.immatch()
#iraf.imfilter()
iraf.noao()
iraf.imred()
iraf.ccdred()
iraf.stsdas()
iraf.hst_calib()
iraf.nicmos()
iraf.imutil()

# IRAF boolean/indefinite sentinels re-exported as module-level constants.
yes = iraf.yes
no = iraf.no
INDEF = iraf.INDEF
# module-wide defaults for clobber/verbose behaviour
globclob = yes
globver = yes

# RATIR configuration.
RATIRFILTS = ["i"]  # filters processed by this module
# per-camera pixel scales (presumably arcsec/pixel -- TODO confirm)
RATIRPIXSCALE = {"C0": 0.32, "C1": 0.32, "C2": 0.30, "C3": 0.30}
# NOTE(review): presumably the header tag identifying flat frames -- confirm
RATIRFLAT = "evening flats (bright)"
def create_masterbias(biasdir=None, channel='rc'): ''' Combines slow and fast readout mode biases for the specified channel. ''' iraf.noao(_doprint=0) iraf.imred(_doprint=0) iraf.ccdred(_doprint=0) if (biasdir == None) or biasdir=="": biasdir = "." outs = "Bias_%s_slow.fits"%channel outf = "Bias_%s_fast.fits"%channel if (os.path.isfile(os.path.join(biasdir,outs)) and os.path.isfile(os.path.join(biasdir,outf))): print "Master Bias exists!" return else: print "Starting the Master Bias creation!" os.chdir(biasdir) lfastbias = [] lslowbias = [] #Select all filts that are Bias with same instrument for f in glob.glob("*fits"): try: if (channel == fitsutils.get_par(f, "CHANNEL") and "BIAS" in str.upper(fitsutils.get_par(f, "OBJECT")) ): if (fitsutils.get_par(f, "ADCSPEED")==2): lfastbias.append(f) else: lslowbias.append(f) except: pass print "Files for bias SLOW mode: ", lslowbias print "Files for bias FAST mode: ", lfastbias if len(lfastbias) > 0: bfile_fast ="lbias_fast_"+channel np.savetxt(bfile_fast, np.array(lfastbias), fmt="%s") if (os.path.isfile("Bias_stats_fast")): os.remove("Bias_stats_fast") iraf.imstat("@"+bfile_fast, Stdout="Bias_stats_fast") st = np.genfromtxt("Bias_stats_fast", names=True, dtype=None) print st iraf.imcombine(input = "@"+bfile_fast, \ output = outf, \ combine = "median",\ scale = "mode") os.remove(bfile_fast) if len(lslowbias) > 0: bfile_slow ="lbias_slow_"+channel np.savetxt(bfile_slow, np.array(lslowbias), fmt="%s") if (os.path.isfile("Bias_stats_slow")): os.remove("Bias_stats_slow") iraf.imstat("@"+bfile_slow, Stdout="Bias_stats_slow") st = np.genfromtxt("Bias_stats_slow", names=True, dtype=None) print st iraf.imcombine(input = "@"+bfile_slow, \ output = outs, \ combine = "median",\ scale = "mode") os.remove(bfile_slow)
def efoscfastredu(imglist, _listsens, _listarc, _ext_trace, _dispersionline, _cosmic, _interactive):
    '''Fast 2D->1D spectroscopic reduction of EFOSC spectra.

    For each image in imglist: trims it with ccdproc, optionally cleans
    cosmic rays, wavelength-calibrates against an arc frame, extracts a
    1D spectrum with apall (via ntt.util.extractspectrum) and, when a
    sensitivity function is found, flux-calibrates it.

    Parameters (names follow the ntt pipeline convention):
        imglist: list of 2D spectrum FITS files (must have tech=SPECTRUM).
        _listsens / _listarc: optional lists to search for sensitivity
            functions / arc frames (empty -> search defaults).
        _ext_trace, _dispersionline: passed through to the extraction task.
        _cosmic: run the L.A.Cosmic rejection before calibration.
        _interactive: enable interactive IRAF identification/extraction.

    Returns: list of output file names (extracted / calibrated products).
    '''
    # print "LOGX:: Entering `efoscfastredu` method/function in %(__file__)s"
    # % globals()
    import string
    import os
    import re
    import sys
    os.environ["PYRAF_BETA_STATUS"] = "1"
    # Prefer the astropy FITS reader; fall back to the standalone pyfits.
    try:
        from astropy.io import fits as pyfits
    except:
        import pyfits
    from ntt.util import readhdr, readkey3
    import ntt
    import numpy as np
    dv = ntt.dvex()
    scal = np.pi / 180.  # degrees -> radians
    # Normalise the interactive flag into the yes/no string IRAF expects.
    if not _interactive:
        _interactive = False
        _inter = 'NO'
    else:
        _inter = 'YES'
    from pyraf import iraf
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    iraf.twodspec(_doprint=0)
    iraf.longslit(_doprint=0)
    iraf.onedspec(_doprint=0)
    iraf.specred(_doprint=0)
    # Reset the tasks we drive to their package defaults.
    toforget = ['ccdproc', 'imcopy', 'specred.apall', 'longslit.identify',
                'longslit.reidentify', 'specred.standard',
                'longslit.fitcoords', 'onedspec.wspectext']
    for t in toforget:
        iraf.unlearn(t)
    iraf.ccdred.verbose = 'no'  # not print steps
    iraf.specred.verbose = 'no'  # not print steps
    # ccdproc is used for trimming only; disable every other correction.
    iraf.ccdproc.darkcor = 'no'
    iraf.ccdproc.fixpix = 'no'
    iraf.ccdproc.flatcor = 'no'
    iraf.ccdproc.zerocor = 'no'
    iraf.ccdproc.ccdtype = ''
    # Detector gain/read-noise taken from the first image's header and
    # applied to all extractions -- assumes a homogeneous imglist.
    _gain = ntt.util.readkey3(ntt.util.readhdr(imglist[0]), 'gain')
    _ron = ntt.util.readkey3(ntt.util.readhdr(imglist[0]), 'ron')
    iraf.specred.apall.readnoi = _ron
    iraf.specred.apall.gain = _gain
    iraf.specred.dispaxi = 2
    iraf.longslit.dispaxi = 2
    iraf.longslit.mode = 'h'
    iraf.specred.mode = 'h'
    iraf.noao.mode = 'h'
    iraf.ccdred.instrument = "ccddb$kpno/camera.dat"
    # `direc$` is used below as an IRAF path prefix for the ntt package data.
    iraf.set(direc=ntt.__path__[0] + '/')
    for img in imglist:
        hdr = ntt.util.readhdr(img)
        _tech = ntt.util.readkey3(hdr, 'tech')
        if _tech != 'SPECTRUM':
            sys.exit('error: ' + str(img) + ' is not a spectrum ')
        print '\n#### image name = ' + img + '\n'
        _grism0 = readkey3(hdr, 'grism')
        _filter0 = readkey3(hdr, 'filter')
        _slit0 = readkey3(hdr, 'slit')
        _object0 = readkey3(hdr, 'object')
        _date0 = readkey3(hdr, 'date-night')
        setup = (_grism0, _filter0, _slit0)
        _biassec0 = '[3:1010,1026:1029]'
        # Trim section depends on the grism (and, for Gr13, the order-sorting
        # filter).
        # NOTE(review): for Gr13 with a filter other than Free/GG495/OG530,
        # _trimsec0 is never assigned and the ccdproc call below would raise
        # UnboundLocalError -- confirm whether other filters can occur.
        if _grism0 == 'Gr16':
            _trimsec0 = '[100:950,1:950]'
        elif _grism0 == 'Gr13':
            if _filter0 == 'Free':
                _trimsec0 = '[100:950,1:1015]'
            elif _filter0 == 'GG495':
                _trimsec0 = '[100:950,208:1015]'
            elif _filter0 == 'OG530':
                _trimsec0 = '[100:950,300:1015]'
        elif _grism0 == 'Gr11':
            _trimsec0 = '[100:950,5:1015]'
        else:
            _trimsec0 = '[100:950,5:1015]'
        # Build the output name t<object>_<date>_<grism>_<filter>_<slit>,
        # sanitising characters that would break file names.
        _object0 = re.sub(' ', '', _object0)
        _object0 = re.sub('/', '_', _object0)
        nameout0 = 't' + str(_object0) + '_' + str(_date0)
        for _set in setup:
            nameout0 = nameout0 + '_' + _set
        nameout0 = ntt.util.name_duplicate(img, nameout0, '')
        timg = nameout0
        if os.path.isfile(timg):
            os.system('rm -rf ' + timg)
        # Copy, then trim in place (output='' means overwrite input).
        iraf.imcopy(img, output=timg)
        iraf.ccdproc(timg, output='', overscan='no', trim='yes',
                     zerocor="no", flatcor="no", readaxi='column',
                     trimsec=str(_trimsec0), biassec=_biassec0, Stdout=1)
        img = timg
        # Locate the arc frame; an explicitly supplied arc is trimmed the
        # same way as the science frame.
        if _listarc:
            arcfile = ntt.util.searcharc(img, _listarc)[0]
        else:
            arcfile = ''
        if not arcfile:
            arcfile = ntt.util.searcharc(img, '')[0]
        else:
            iraf.ccdproc(arcfile, output='t' + arcfile, overscan='no',
                         trim='yes', zerocor="no", flatcor="no",
                         readaxi='column', trimsec=str(_trimsec0),
                         biassec=str(_biassec0), Stdout=1)
            arcfile = 't' + arcfile
        if _cosmic:
            # print cosmic rays rejection
            ntt.cosmics.lacos(img, output='', gain=_gain, readn=_ron,
                              xorder=9, yorder=9, sigclip=4.5, sigfrac=0.5,
                              objlim=1, verbose=True, interactive=False)
            print '\n### cosmic rays rejections ........ done '
        if not arcfile:
            print '\n### warning no arcfile \n exit '
        else:
            # Wavelength calibration: reuse a reference arc solution when one
            # exists, otherwise identify lines from scratch.
            arcref = ntt.util.searcharc(img, '')[0]
            if arcfile[0] == '/':
                os.system('cp ' + arcfile + ' ' +
                          string.split(arcfile, '/')[-1])
                arcfile = string.split(arcfile, '/')[-1]
            arcref = string.split(arcref, '/')[-1]
            if arcref:
                os.system('cp ' + arcref + ' .')
                arcref = string.split(arcref, '/')[-1]
                if not os.path.isdir('database/'):
                    os.mkdir('database/')
                # Bring the stored identification database for the reference
                # arc into the local database/ directory, if available.
                # NOTE(review): re.sub('.fits', ...) treats '.fits' as a regex
                # (dot matches any char) -- works for these names but is fragile.
                if os.path.isfile(ntt.util.searcharc(img, '')[1] +
                                  '/database/id' + re.sub('.fits', '', arcref)):
                    os.system('cp ' + ntt.util.searcharc(img, '')[1] +
                              '/database/id' + re.sub('.fits', '', arcref) +
                              ' database/')
                iraf.longslit.reidentify(referenc=arcref, images=arcfile,
                                         interac=_inter, section='column 10',
                                         coordli='direc$standard/ident/Lines_HgCdHeNeAr600.dat',
                                         overrid='yes', step=0, newaps='no',
                                         nsum=5, nlost=2, mode='h',
                                         verbose='no')
            else:
                iraf.longslit.identify(images=arcfile, section='column 10',
                                       coordli='direc$standard/ident/Lines_HgCdHeNeAr600.dat',
                                       nsum=10, fwidth=7, order=3, mode='h')
                iraf.longslit.reident(referenc=arcfile, images=arcfile,
                                      interac='NO', section='column 10',
                                      coordli='direc$standard/ident/Lines_HgCdHeNeAr600.dat',
                                      overrid='yes', step=10, newaps='yes',
                                      nsum=5, nlost=2, mode='h', verbose='no')
            # Fit the 2D dispersion solution and rectify the spectrum.
            qqq = iraf.longslit.fitcoords(images=re.sub('.fits', '', arcfile),
                                          fitname=re.sub('.fits', '', arcfile),
                                          interac='no', combine='yes',
                                          databas='database',
                                          function='legendre', yorder=4,
                                          logfile='logfile', plotfil='',
                                          mode='h')
            iraf.specred.transform(input=img, output=img, minput='',
                                   fitnames=re.sub('.fits', '', arcfile),
                                   databas='database', x1='INDEF', x2='INDEF',
                                   y1='INDEF', y2='INDEF', flux='yes',
                                   mode='h', logfile='logfile')
            # ###################### check wavelength calibration ############
            # Cross-check against a reference sky spectrum and shift CRVAL2
            # by the measured offset (rounded to an integer Angstrom).
            _skyfile = ntt.__path__[
                0] + '/standard/ident/sky_' + setup[0] + '_' + setup[1] + '.fits'
            shift = ntt.efoscspec2Ddef.skyfrom2d(img, _skyfile)
            print '\n### check in wavelengh performed ...... spectrum shifted of ' + str(shift) + ' Angstrom \n'
            zro = pyfits.open(img)[0].header.get('CRVAL2')
            ntt.util.updateheader(img, 0, {'CRVAL2': [zro + int(shift), '']})
            # Decide object vs standard star by angular distance to the
            # closest entry of the standard-star catalogue (<100 arcsec).
            std, rastd, decstd, magstd = ntt.util.readstandard(
                'standard_efosc_mab.txt')
            hdrt = readhdr(img)
            _ra = readkey3(hdrt, 'RA')
            _dec = readkey3(hdrt, 'DEC')
            _object = readkey3(hdrt, 'object')
            # Great-circle distance in arcsec to every catalogue standard.
            dd = np.arccos(np.sin(_dec * scal) * np.sin(decstd * scal) +
                           np.cos(_dec * scal) * np.cos(decstd * scal) *
                           np.cos((_ra - rastd) * scal)) * ((180 / np.pi) * 3600)
            if min(dd) < 100:
                _type = 'stdsens'
                ntt.util.updateheader(
                    img, 0, {'stdname': [std[np.argmin(dd)], '']})
                ntt.util.updateheader(
                    img, 0, {'magstd': [float(magstd[np.argmin(dd)]), '']})
            else:
                _type = 'obj'
            print '\n### EXTRACTION USING IRAF TASK APALL \n'
            # NOTE(review): result is re-initialised for every image in
            # imglist, so the returned list covers only the last image that
            # reached this point -- confirm this is intended.
            result = []
            if _type == 'obj':
                # Science object: extract, tag headers, then flux-calibrate
                # if a sensitivity function can be found.
                imgex = ntt.util.extractspectrum(
                    img, dv, _ext_trace, _dispersionline, _interactive, _type)
                ntt.util.updateheader(
                    imgex, 0, {'FILETYPE': [22107, 'extracted 1D spectrum ']})
                ntt.util.updateheader(imgex, 0, {
                    'PRODCATG': ['SCIENCE.' +
                                 readkey3(readhdr(imgex), 'tech').upper(),
                                 'Data product category']})
                ntt.util.updateheader(imgex, 0, {'TRACE1': [img, '']})
                result.append(imgex)
                if _listsens:
                    sensfile = ntt.util.searchsens(img, _listsens)[0]
                else:
                    sensfile = ''
                if not sensfile:
                    sensfile = ntt.util.searchsens(img, '')[0]
                if sensfile:
                    # Flux calibration of the 1D spectrum (imgf) and of the
                    # rectified 2D frame (imgd), plus an ASCII export.
                    imgf = re.sub('.fits', '_f.fits', img)
                    _extinctdir = 'direc$standard/extinction/'
                    _extinction = 'extinction_lasilla.dat'
                    _observatory = 'lasilla'
                    _exptime = readkey3(hdrt, 'exptime')
                    _airmass = readkey3(hdrt, 'airmass')
                    ntt.util.delete(imgf)
                    iraf.specred.calibrate(input=imgex, output=imgf,
                                           sensiti=sensfile, extinct='yes',
                                           flux='yes', ignorea='yes',
                                           extinction=_extinctdir + _extinction,
                                           observatory=_observatory,
                                           airmass=_airmass, exptime=_exptime,
                                           fnu='no')
                    hedvec = {'SENSFUN': [string.split(sensfile, '/')[-1],
                                          'sensitivity function'],
                              'FILETYPE': [22208, '1D wavelength and flux calibrated spectrum '],
                              'SNR': [ntt.util.StoN2(imgf, False),
                                      'Average S/N ratio'],
                              'BUNIT': ['erg/cm2/s/Angstrom',
                                        'Flux Calibration Units'],
                              'TRACE1': [imgex, '']}
                    ntt.util.updateheader(imgf, 0, hedvec)
                    imgout = imgf
                    imgd = ntt.efoscspec1Ddef.fluxcalib2d(img, sensfile)
                    ntt.util.updateheader(
                        imgd, 0, {'FILETYPE': [22209, '2D wavelength and flux calibrated spectrum ']})
                    ntt.util.updateheader(imgd, 0, {'TRACE1': [img, '']})
                    imgasci = re.sub('.fits', '.asci', imgout)
                    ntt.util.delete(imgasci)
                    iraf.onedspec.wspectext(
                        imgout + '[*,1,1]', imgasci, header='no')
                    result = result + [imgout, imgd, imgasci]
            else:
                # Standard star: extract and derive the sensitivity function.
                imgex = ntt.util.extractspectrum(
                    img, dv, _ext_trace, _dispersionline, _interactive, 'std')
                imgout = ntt.efoscspec1Ddef.sensfunction(
                    imgex, 'spline3', 6, _inter)
                result = result + [imgout]
    # Finalise every produced FITS file for ESO phase 3 delivery.
    for img in result:
        if img[-5:] == '.fits':
            ntt.util.phase3header(img)  # phase 3 definitions
            ntt.util.airmass(img)  # phase 3 definitions
            ntt.util.updateheader(
                img, 0, {'quality': ['Rapid', 'Final or Rapid reduction']})
    return result
def reduce_image(img, flatdir=None, biasdir=None, cosmic=True, astrometry=True, channel='rc', target_dir='reduced'): ''' Applies Flat field and bias calibrations to the image. Steps: 1. - Solve astrometry on the entire image. 2. - Compute master bias and de-bias the image. 3. - Separate the image into 4 filters. 4. - Compute flat field for each filter and apply flat fielding on the image. 5. - Computes cosmic ray rejectionon the entire image. 6. - Compute zeropoint for each image and store in a log file. 7. - Plot zeropoint for the night. ''' print "Reducing image ", img objectname = fitsutils.get_par(img, "OBJECT").replace(" ", "").replace("]","").replace("[", "") print "For object", objectname #Change to image directory mydir = os.path.dirname(img) if mydir=="": mydir = "." mydir = os.path.abspath(mydir) os.chdir(mydir) #Create destination directory if (not os.path.isdir(target_dir)): os.makedirs(target_dir) #Rename to the image name only img = os.path.basename(img) if (astrometry): print "Solving astometry for the whole image..." img = solve_astrometry(img) astro = "a_" else: astro = "" #Compute BIAS if (biasdir == None or biasdir==""): biasdir = "." create_masterbias(biasdir) bias_slow = os.path.join(biasdir, "Bias_%s_%s.fits"%(channel, 'slow')) bias_fast = os.path.join(biasdir, "Bias_%s_%s.fits"%(channel, 'fast')) # Running IRAF to DE-BIAS iraf.noao(_doprint=0) iraf.imred(_doprint=0) iraf.ccdred(_doprint=0) #Compute flat field if (flatdir == None or flatdir==""): flatdir = "." create_masterflat(flatdir, biasdir) #New names for the object. debiased = "b_" + astro + img print "Creating debiased file, ",debiased if (not os.path.isfile(bias_slow) or not os.path.isfile(bias_fast)): print "Master bias not found!" return #Debias if (fitsutils.get_par(img, "ADCSPEED")==2): iraf.imarith(img, "-", bias_fast, debiased) fitsutils.update_par(debiased, "BIASFILE", bias_fast) fitsutils.update_par(debiased, "RDNOISE", 20.) 
else: iraf.imarith(img, "-", bias_slow, debiased) fitsutils.update_par(debiased, "BIASFILE", bias_slow) fitsutils.update_par(debiased, "RDNOISE", 4.) #Set negative counts to zero hdu = pf.open(debiased) header = hdu[0].header hdu[0].data[hdu[0].data<0] = 0 hdu.writeto(debiased, clobber=True) #Slicing the image for flats slice_names = slice_rc(debiased) #Remove un-sliced image os.remove(debiased) # DE-flat each filter and store under object name for i, debiased_f in enumerate(slice_names): b = fitsutils.get_par(debiased_f, 'filter') deflatted = "f_b_" + astro + objectname + "_%s.fits"%b #Flat to be used for that filter flat = os.path.join(flatdir, "Flat_%s_%s_norm.fits"%(channel, b)) if (not os.path.isfile(flat)): print "Master flat not found in", flat return #Cleans the deflatted file if exists if (os.path.isfile(deflatted)): os.remove(deflatted) iraf.imarith(debiased_f, "/", flat, deflatted) #Removes the de-biased file os.remove(debiased_f) print "Updating header with original filename and flat field used." fitsutils.update_par(deflatted, "ORIGFILE", img) fitsutils.update_par(deflatted, "FLATFILE", flat) slice_names[i] = deflatted if (cosmic): print "Correcting for cosmic rays..." # Correct for cosmics each filter for i, deflatted in enumerate(slice_names): cclean = "c_" +name clean_cosmic(os.path.join(os.path.abspath(mydir), deflatted), cclean) slice_names[i] = cclean #Moving files to the target directory for name in slice_names: if (os.path.isfile(name)): shutil.move(name, os.path.join(target_dir, name))
def extractspectrum(img, dv, _ext_trace, _dispersionline, _interactive, _type,
                    automaticex=False):
    '''Extract a 1D spectrum from a 2D frame with IRAF specred.apall.

    Decides whether to perform a new extraction or reuse an existing
    aperture database entry (possibly interactively), optionally reuses
    another spectrum as trace reference, then runs apall with the
    parameter set stored in the lickshane dv dictionary for _type.

    Note: the dv argument is immediately overwritten by
    lickshane.util.dvex() below, so the passed-in value is unused.

    Returns: the name of the extracted spectrum (<img>_ex.fits), whether
    or not a new extraction was actually performed.
    '''
    # print "LOGX:: Entering `extractspectrum` method/function in
    # %(__file__)s" % globals()
    import glob
    import os
    import string
    import sys
    import re
    import lickshane
    import datetime

    # MJD of 2012-01-01 is 55927; add the days elapsed since then.
    MJDtoday = 55927 + (datetime.date.today() - datetime.date(2012, 01, 01)).days

    from pyraf import iraf
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.specred(_doprint=0)
    toforget = ['specred.apall', 'specred.transform']
    for t in toforget:
        iraf.unlearn(t)

    dv = lickshane.util.dvex()
    hdr = lickshane.util.readhdr(img)
    _gain = lickshane.util.readkey3(hdr, 'gain')
    _rdnoise = lickshane.util.readkey3(hdr, 'ron')
    _grism = lickshane.util.readkey3(hdr, 'grism')
    _instrument = lickshane.util.readkey3(hdr, 'version')

    imgex = re.sub('.fits', '_ex.fits', img)
    # imgfast: the image name with its second-to-last '_'-separated field
    # (presumably the MJD prefix; see the commented line) stripped, used to
    # look up aperture databases written under the alternate name.
    imgfast = re.sub(string.split(img, '_')[-2] + '_', '', img)
    # imgfast=re.sub(str(MJDtoday)+'_','',img)

    # No previous extraction or aperture database found -> extract fresh.
    if not os.path.isfile(imgex) and not os.path.isfile(
            'database/ap' + re.sub('.fits', '', img)) and not os.path.isfile(
            'database/ap' + re.sub('.fits', '', imgfast)):
        _new = 'yes'
        _extract = 'yes'
    else:
        # A previous extraction exists; decide (interactively, unless
        # automaticex suppresses the question) whether to redo it.
        if automaticex:
            if _interactive in ['Yes', 'yes', 'YES', 'y', 'Y']:
                answ = 'x'
                while answ not in ['o', 'n', 's']:
                    answ = raw_input(
                        '\n### New extraction [n], extraction with old parameters [o], skip extraction [s] ? [o]'
                    )
                    if not answ:
                        answ = 'o'
                if answ == 'o':
                    _new, _extract = 'no', 'yes'
                elif answ == 'n':
                    _new, _extract = 'yes', 'yes'
                else:
                    _new, _extract = 'yes', 'no'
            else:
                _new, _extract = 'no', 'yes'
        else:
            if _interactive in ['Yes', 'yes', 'YES', 'y', 'Y']:
                answ = 'x'
                while answ not in ['y', 'n']:
                    answ = raw_input(
                        '\n### do you want to extract again [[y]/n] ? ')
                    if not answ:
                        answ = 'y'
                if answ == 'y':
                    _new, _extract = 'yes', 'yes'
                else:
                    _new, _extract = 'yes', 'no'
            else:
                _new, _extract = 'yes', 'yes'
    if _extract == 'yes':
        lickshane.util.delete(imgex)

        # Choose the dispersion line (row/column) at which to extract:
        # either ask the user (showing the grism default) or use the default.
        if _dispersionline:
            question = 'yes'
            while question == 'yes':
                _z1, _z2, goon = lickshane.util.display_image(
                    img, 1, '', '', False)
                dist = raw_input(
                    '\n### At which line do you want to extract the spectrum [' +
                    str(dv['line'][_grism]) + '] ? ')
                if not dist:
                    dist = 400
                try:
                    dist = int(dist)
                    question = 'no'
                except:
                    print '\n### input not valid, try again:'
        else:
            dist = dv['line'][_grism]

        # Optionally reuse the trace of an already-extracted spectrum.
        if _ext_trace in ['yes', 'Yes', 'YES', True]:
            lista = glob.glob('*ex.fits')
            if lista:
                for ii in lista:
                    print ii
                _reference = raw_input(
                    '\### which object do you want to use for the trace [' +
                    str(lista[0]) + '] ? ')
                if not _reference:
                    _reference = lista[0]
                _reference = re.sub('_ex', '', _reference)
                _fittrac = 'no'
                _trace = 'no'
            else:
                sys.exit('\n### error: no extracted spectra in the directory')
        else:
            _reference = ''
            _fittrac = 'yes'
            _trace = 'yes'

        if _new == 'no':
            # Reuse the stored aperture: rename the alternate database entry
            # if needed, then run apall fully non-interactively.
            if not os.path.isfile('database/ap' + re.sub('.fits', '', img)):
                lickshane.util.repstringinfile(
                    'database/ap' + re.sub('.fits', '', imgfast),
                    'database/ap' + re.sub('.fits', '', img),
                    re.sub('.fits', '', imgfast), re.sub('.fits', '', img))
            _find = 'no'
            _recenter = 'no'
            _edit = 'no'
            _trace = 'no'
            _fittrac = 'no'
            _mode = 'h'
            _resize = 'no'
            _review = 'no'
            iraf.specred.mode = 'h'
            _interactive = 'no'
        else:
            # Fresh extraction: let apall find/center/edit the aperture.
            iraf.specred.mode = 'q'
            _mode = 'q'
            _find = 'yes'
            _recenter = 'yes'
            _edit = 'yes'
            _review = 'yes'
            _resize = dv[_type]['_resize']

        # Dispersion axis depends on the Kast arm (blue vs red camera).
        if _instrument == 'kastb':
            iraf.specred.dispaxi = 1
        elif _instrument == 'kastr':
            iraf.specred.dispaxi = 2

        iraf.specred.apall(img, output=imgex, referen=_reference,
                           trace=_trace, fittrac=_fittrac, find=_find,
                           recenter=_recenter, edit=_edit, nfind=1,
                           extract='yes', backgro='fit', gain=_gain,
                           readnoi=_rdnoise, lsigma=4, usigma=4,
                           format='multispec', b_function='legendre',
                           b_sample=dv[_type]['_b_sample'], clean='yes',
                           pfit='fit1d', lower=dv[_type]['_lower'],
                           upper=dv[_type]['_upper'],
                           t_niter=dv[_type]['_t_niter'],
                           width=dv[_type]['_width'],
                           radius=dv[_type]['_radius'], line=dist,
                           nsum=dv[_type]['_nsum'],
                           t_step=dv[_type]['_t_step'],
                           t_nsum=dv[_type]['_t_nsum'],
                           t_nlost=dv[_type]['_t_nlost'],
                           t_sample=dv[_type]['_t_sample'], resize=_resize,
                           t_order=dv[_type]['_t_order'],
                           weights=dv[_type]['_weights'],
                           interactive=_interactive, review=_review,
                           mode=_mode)

        # Duplicate the new aperture database under the alternate name so a
        # later run can find it via imgfast.
        lickshane.util.repstringinfile(
            'database/ap' + re.sub('.fits', '', img),
            'database/ap' + re.sub('.fits', '', imgfast),
            re.sub('.fits', '', img), re.sub('.fits', '', imgfast))
    else:
        print '\n### skipping new extraction'
    return imgex