def normalise_flats(flatdir):
    """ Normalise flats """
    print 'In directory ' + flatdir
    print 'Normalising combined flats...'
    if os.path.exists(os.path.join(flatdir, 'nFlat.fits')):
        os.remove(os.path.join(flatdir, 'nFlat.fits'))
        print 'Removing file ' + os.path.join(flatdir, 'nFlat.fits')
    iraf.twodspec(_doprint=0)
    iraf.longslit(_doprint=0)
    iraf.response.setParam('calibration', os.path.join(flatdir, 'Flat.fits'))
    iraf.response.setParam('normalization', os.path.join(flatdir, 'Flat.fits'))
    iraf.response.setParam('response', os.path.join(flatdir, 'nFlat'))
    iraf.response.setParam('low_reject', 3.)
    iraf.response.setParam('high_reject', 3.)
    iraf.response.setParam('order', 40)
    iraf.response()
    return None
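# A minimal usage sketch (the directory path below is hypothetical, not part of the
# original code): normalise_flats() expects a combined Flat.fits in flatdir and writes
# the normalised nFlat.fits alongside it; the module-level imports (os, pyraf.iraf)
# are assumed to be in place.
flatdir = '/data/night1/flats'   # hypothetical path containing the combined Flat.fits
normalise_flats(flatdir)         # writes /data/night1/flats/nFlat.fits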
def transform(lst):
    f = open(lst)
    l = f.readlines()
    f.close()
    l = [i.strip() for i in l]
    namelst = ['ftbo' + i for i in l]
    outputlst = ['wftbo' + i for i in l]
    # namelst = [i.split('.')[0] + 'otbf.fits' for i in l]
    # outputlst = [i.split('.')[0] + 'otbfw.fits' for i in l]
    f = open("temp1.txt", 'w')
    for i in namelst:
        f.write(i + '\n')
    f.close()
    f = open("temp2.txt", 'w')
    for i in outputlst:
        f.write(i + '\n')
    f.close()
    iraf.twodspec()
    iraf.longslit(dispaxis=2)
    # for i in namelst:
    #     print '#' * 30, i, '===>', i.split('.')[0] + 'w.fits'
    #     iraf.transform(input = i, output = i.split('.')[0] + 'w.fits',
    #                    minput = '', moutput = '', fitnames = 'LampLamp',
    #                    database = 'database', interptype = 'spline3',
    #                    flux = 'yes')
    iraf.transform(input='@temp1.txt', output='@temp2.txt', minput='', moutput='',
                   fitnames='LampLamp', database='database', interptype='spline3',
                   flux='yes')
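# A hedged driver sketch (the list name and frame names are hypothetical): transform()
# reads raw frame names from a plain text list, prepends the 'ftbo'/'wftbo' prefixes,
# and rectifies the frames through IRAF's '@file' list syntax.
raw_frames = ['obj001.fits', 'obj002.fits']   # illustrative raw frame names
with open('obj.lst', 'w') as fh:
    fh.write('\n'.join(raw_frames) + '\n')

transform('obj.lst')   # rectifies ftboobj001.fits -> wftboobj001.fits, etc.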
def reidentify():
    iraf.twodspec()
    iraf.longslit()
    iraf.reidentify(reference='Lamp', images='Lamp', interactive='no', section='column',
                    newaps='yes', override='yes', refit='yes', trace='no', step=10,
                    nsum=10, shift=0.0, search=0.0, nlost=5, cradius=7.0, threshold=0.0,
                    addfeatures='no', coordlist=cdherb_file, match=-3.0, maxfeatures=50,
                    minsep=2.0, database='database')
    iraf.flpr()
def transform(lst):
    f = open(lst)
    l = f.readlines()
    f.close()
    l = [i.strip() for i in l]
    namelst = ['ftb' + i for i in l]
    outputlst = ['wftb' + i for i in l]
    f = open("temp1.txt", 'w')
    for i in namelst:
        f.write(i + '\n')
    f.close()
    f = open("temp2.txt", 'w')
    for i in outputlst:
        f.write(i + '\n')
    f.close()
    iraf.twodspec()
    iraf.longslit(dispaxis=2)
    # iraf.transform(input='@temp1.txt', output='@temp2.txt', minput='', moutput='',
    #                fitnames='LampLamp', database='database', interptype='spline3',
    #                flux='yes')
def wal(lstfile):
    iraf.noao()
    iraf.twodspec()
    iraf.longslit()
    iraf.identify(images='Lamp', section='middle column', database='database',
                  coordlist='linelists$idhenear.dat', units='', nsum=10, match=-3.0,
                  maxfeatures=50, zwidth=100.0, ftype='emission', fwidth=20.0,
                  cradius=5.0, threshold=0.0, minsep=2.0, function='chebyshev', order=6,
                  sample='*', niterate=0, low_reject=3.0, high_reject=3.0, grow=0.0,
                  autowrite=False, graphics='stdgraph', cursor='', crval='', cdelt='')
    iraf.reidentify(reference='Lamp', images='Lamp', interactive='no', section='column',
                    newaps=True, override=True, refit=True, trace=False, step=10, nsum=10,
                    shift=0.0, search=0.0, nlost=5, cradius=7.0, threshold=0.0,
                    addfeatures=False, coordlist='linelists$idhenear.dat', match=-3.0,
                    maxfeatures=50, minsep=2.0, database='database', logfiles='logfile',
                    plotfile='', verbose=False, graphics='stdgraph', cursor='',
                    answer='yes', crval='', cdelt='', mode='al')
    iraf.fitcoords(images='Lamp', fitname='Lamp', interactive=True, combine=False,
                   database='database', deletions='deletions.db', function='chebyshev',
                   xorder=6, yorder=6, logfiles='STDOUT,logfile', plotfile='plotfile',
                   graphics='stdgraph', cursor='', mode='al')
    iraf.longslit(dispaxis=2)
    iraf.transform(input='%ftbo%ftbo%@' + lstfile, output='%wftbo%wftbo%@' + lstfile,
                   minput='', moutput='', fitnames='LampLamp', database='database',
                   interptype='spline3', flux=True)
def identify():
    iraf.twodspec()
    iraf.longslit()
    iraf.identify(images='Lamp.fits', section='middle column', database='database',
                  coordlist=cdherb_file, nsum=10, match=-3.0, maxfeatures=50, zwidth=100.0,
                  ftype='emission', fwidth=20.0, cradius=7.0, threshold=0.0, minsep=2.0,
                  function='chebyshev', order=6, sample='*', niterate=0, low_reject=3.0,
                  high_reject=3.0, grow=0.0, autowrite='no')
    iraf.flpr()
def calibrate(namelst):
    iraf.noao()
    iraf.twodspec()
    iraf.longslit(dispaxis=2, nsum=1, observatory='ca', extinction=extpath, caldir=stdpath)
    for fitname in namelst:
        outname = 'mark_' + fitname
        if os.path.isfile(outname):
            print('remove file ' + outname)
            os.remove(outname)
        iraf.calibrate(input=fitname, output=outname, extinct='yes', flux='yes',
                       extinction=extpath, ignoreaps='yes', sensitivity='Sens', fnu='no')
        iraf.splot(images=outname)
    iraf.flpr()
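# A hedged example call (the frame names are hypothetical): calibrate() flux-calibrates
# each wavelength-rectified frame against the previously derived 'Sens' function and
# writes 'mark_'-prefixed outputs.
science_frames = ['awftboobj001.fits', 'awftboobj002.fits']
calibrate(science_frames)   # writes mark_awftboobj001.fits, mark_awftboobj002.fits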
def standard(namelst):
    iraf.noao()
    iraf.twodspec()
    iraf.longslit(dispaxis=2, nsum=1, observatory=func.obs.name, extinction=extpath,
                  caldir=stdpath)
    if os.path.isfile('Std'):
        print('remove file Std')
        os.remove('Std')
    for std_fitsname in namelst:
        stdname, stdmag, stdmagband = func.standard_star_info(std_fitsname)
        print(colored('the standard star is ' + stdname, 'green'))
        wid, sep = get_band_width_sep(std_fitsname)
        airmas = pyfits.getval(std_fitsname, 'airmass')
        exposure = pyfits.getval(std_fitsname, 'exptime')
        iraf.standard(input=std_fitsname, output='Std', samestar=True, beam_switch=False,
                      apertures='', bandwidth=wid, bandsep=sep,  # 30.0 20.0
                      fnuzero=3.6800000000000E-20, extinction=extpath, caldir=stdpath,
                      observatory=func.obs.name, interact=True, graphics='stdgraph',
                      cursor='', star_name=stdname, airmass=airmas, exptime=exposure,
                      mag=stdmag, magband=stdmagband, teff='', answer='yes')
    if os.path.isfile('Sens.fits'):
        print('remove file Sens.fits')
        os.remove('Sens.fits')
    iraf.sensfunc(standards='Std', sensitivity='Sens', extinction=extpath,
                  function='spline3', order=9)
    iraf.splot('Sens')
def standard():
    stdpath = os.path.split(os.path.realpath(__file__))[0] + os.sep + 'standarddir' + os.sep
    print('standard dir is ' + stdpath)
    extpath = os.path.split(os.path.realpath(__file__))[0] + os.sep + 'LJextinct.dat'
    iraf.noao()
    iraf.twodspec()
    iraf.longslit(dispaxis=2, nsum=1, observatory='Lijiang', extinction=extpath,
                  caldir=stdpath)
    for objname in stdgroup:
        stdname, stdmag, stdmagband = get_std_name(objname)
        print('the standard star is ' + stdname)
        stdmag = float(stdmag)
        outname1 = 'stdawftbo' + stdgroup[objname][0]
        inname = ''
        for tmpname in stdgroup[objname]:
            inname = inname + 'awftbo' + tmpname + ','
        inname = inname[0:-1]
        iraf.standard(input=inname, output=outname1, samestar=True, beam_switch=False,
                      apertures='', bandwidth=30.0, bandsep=20.0,
                      fnuzero=3.6800000000000E-20, extinction=extpath, caldir=stdpath,
                      observatory=')_.observatory', interact=True, graphics='stdgraph',
                      cursor='', star_name=stdname, airmass='', exptime='', mag=stdmag,
                      magband=stdmagband, teff='', answer='yes')
    for name in stdgroup:
        inpar = 'stdawftbo' + stdgroup[name][0]
        iraf.sensfunc(standards=inpar, sensitivity='sensawftbo' + stdgroup[name][0],
                      extinction=extpath, function='spline3', order=9)
def fitcoord_edge_each(fname, overwrite=False):
    database = 'database'
    function = 'chebyshev'
    xorder = 2
    yorder = 7
    logfiles = 'STDOUT,fitcoord_edge.log'
    interactive = 'yes'
    cursor = ''
    # cursor = filibdir + 'fitcoord_edge.cur'
    idfile = database + '/id' + fname
    if not os.path.isfile(idfile):
        print('\t Edge identification files do not exist. ' + idfile)
        return
    fcfile = database + '/fc' + fname
    if os.path.isfile(fcfile) and not overwrite:
        print('\t Edge fitcoord files already exist. ' + fcfile)
        print('\t This procedure is skipped.')
        return
    # Not to display items in IRAF packages
    sys.stdout = open('/dev/null', 'w')
    iraf.noao()
    iraf.twodspec()
    iraf.longslit()
    sys.stdout = sys.__stdout__  # Back to the standard output
    iraf.fitcoord(fname, fitname='', interactive=interactive, combine='no',
                  database=database, deletions='', function=function, xorder=xorder,
                  yorder=yorder, logfiles=logfiles, graphics='stdgraph', cursor=cursor)
    return
def initialize_iraf():
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    iraf.specred(_doprint=0)
    iraf.twodspec(_doprint=0)
    iraf.longslit(_doprint=0)
    return
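# A hedged sketch of a typical entry point (the unlearn call is illustrative, not from
# the original code): load the IRAF packages once before running any longslit task.
from pyraf import iraf

initialize_iraf()          # load noao/imred/ccdred/specred/twodspec/longslit once
iraf.unlearn('response')   # optionally reset a task's parameters before configuring it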
def combine_flat(lstfile):
    if os.path.isfile('Halogen.fits'):
        print 'remove Halogen.fits'
        os.remove('Halogen.fits')
    if os.path.isfile('Resp.fits'):
        print 'remove Resp.fits'
        os.remove('Resp.fits')
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    iraf.flatcombine(input='tbo//@' + lstfile, output='Halogen', combine='average',
                     reject='crreject', ccdtype='', process=False, subsets=False,
                     delete=False, clobber=False, scale='mode', statsec='', nlow=1,
                     nhigh=1, nkeep=1, mclip=True, lsigma=3.0, hsigma=3.0,
                     rdnoise='rdnoise', gain='gain', snoise=0.0, pclip=-0.5, blank=1.0)
    iraf.twodspec()
    iraf.longslit(dispaxis=2, nsum=1, observatory='Lijiang',
                  extinction=func.config_path + os.sep + 'LJextinct.dat',
                  caldir=func.std_path + os.sep, interp='poly5')
    iraf.response(calibration='Halogen', normalization='Halogen', response='Resp',
                  interactive=True, threshold='INDEF', sample='*', naverage=1,
                  function='spline3', order=25, low_reject=10.0, high_reject=10.0,
                  niterate=1, grow=0.0, graphics='stdgraph', cursor='')
def fitcoords():
    iraf.twodspec()
    iraf.longslit()
    iraf.fitcoords(images='Lamp', fitname='Lamp', interactive='yes', combine='no',
                   database='database', deletions='deletions.db', function='chebyshev',
                   xorder=6, yorder=6)
def sensfunc():
    iraf.twodspec()
    iraf.longslit(dispaxis=2, nsum=1, observatory='Lijiang',
                  extinction='onedstds$LJextinct.dat', caldir='onedstds$ctiocal/')
    iraf.sensfunc(standards='std', sensitivity='sens',
                  extinction='onedstds$LJextinct.dat', function='spline3', order=9)
def standard(namelst):
    iraf.noao()
    iraf.twodspec()
    iraf.longslit(dispaxis=2, nsum=1, observatory='ca', extinction=extpath, caldir=stdpath)
    std_fitsname = namelst[0]
    stdname, stdmag, stdmagband = standard_star_info(std_fitsname)
    wid, sep = get_band_width_sep(stdname)
    print('<<<<<the standard star is ', stdname, '>>>>>')
    print std_fitsname
    if os.path.isfile('Std'):
        print('remove file Std')
        os.remove('Std')
    iraf.standard(input=std_fitsname, output='Std', samestar=True, beam_switch=False,
                  apertures='', bandwidth=wid, bandsep=sep,  # 30.0 20.0
                  fnuzero=3.6800000000000E-20, extinction=extpath, caldir=stdpath,
                  observatory='ca', interact=True, graphics='stdgraph', cursor='',
                  star_name=stdname, airmass='', exptime='', mag=stdmag,
                  magband=stdmagband, teff='', answer='yes')
    if os.path.isfile('Sens.fits'):
        print('remove file Sens.fits')
        os.remove('Sens.fits')
    iraf.sensfunc(standards='Std', sensitivity='Sens', extinction=extpath,
                  function='spline3', order=15)
    iraf.splot('Sens')
    iraf.flpr()
def cor_airmass(lstfile):
    f = open(lstfile)
    l = f.readlines()
    f.close()
    l = [tmp.split('\n')[0] for tmp in l]
    fitlst = ['awftbo' + tmp for tmp in l]
    for fitname in fitlst:
        if os.path.isfile(fitname):
            fit = pyfits.open(fitname)
            objname = fit[0].header['object'].replace('_', ' ').split()[0]
            print(fitname + ' ' + objname)
            objname_new = find_normal_objname(objname)
            if len(objname_new) == 0:
                objname_new = raw_input('please input object name:')
            radec = findradec(objname_new)
            if len(radec) == 0:
                radec = raw_input('please input ra dec of objname:')
            radec = radec.split()
            fitextnum = len(fit)
            fit.close()
            for lay in range(fitextnum):
                airold = iraf.hselect(images=fitname + '[%i]' % lay, fields='airmass',
                                      expr='yes', Stdout=1)
                airold = float(airold[0])
                print(fitname + ' ' + objname + ' ' + str(lay) + ' airmass old: ' + str(airold))
                fitnamelay = fitname + '[%i]' % lay
                iraf.hedit(images=fitnamelay, fields='airold', value=airold, add='yes',
                           addonly='yes', delete='no', verify='no', show='yes', update='yes')
                iraf.hedit(images=fitnamelay, fields='sname', value=objname_new, add='yes',
                           addonly='yes', delete='no', verify='no', show='yes', update='yes')
                iraf.hedit(images=fitnamelay, fields='RA', value=radec[0], add='yes',
                           addonly='yes', delete='no', verify='no', show='yes', update='yes')
                iraf.hedit(images=fitnamelay, fields='DEC', value=radec[1], add='yes',
                           addonly='yes', delete='no', verify='no', show='yes', update='yes')
                iraf.twodspec()
                stdpath = os.path.split(os.path.realpath(__file__))[0] + os.sep + 'standarddir' + os.sep
                iraf.longslit(dispaxis=2, nsum=1, observatory='Lijiang',
                              extinction='onedstds$LJextinct.dat', caldir=stdpath)
                iraf.setairmass(images=fitnamelay, observatory='Lijiang', intype='beginning',
                                outtype='effective', ra='ra', dec='dec', equinox='epoch',
                                st='lst', ut='date-obs', date='date-obs', exposure='exptime',
                                airmass='airmass', utmiddle='utmiddle', scale=750.0,
                                show='yes', override='yes', update='yes')
                print('name airmass_new airmass_old')
                iraf.hselect(fitnamelay, fields='$I,airmass,airold', expr='yes')
def fitcoord_dispersion(basenames, overwrite=False):
    print('\n#############################')
    print('Getting the dispersion map.')
    # entering the channel image directory.
    print('\t Entering the channel image directory, \"' + fi.chimagedir + '\".')
    os.chdir(fi.chimagedir)
    database = 'database'
    function = 'chebyshev'
    xorder = 3
    yorder = 5
    logfiles = 'STDOUT,fitcoord_dispersion.log'
    # for multi-comparison images
    combine = 'yes'
    # Not to display items in IRAF packages
    sys.stdout = open('/dev/null', 'w')
    iraf.noao()
    iraf.twodspec()
    iraf.longslit()
    sys.stdout = sys.__stdout__  # Back to the standard output
    for i in range(1, 25):
        fcfile = basenames[0] + '.ch%02d' % i
        if os.path.isfile(database + '/fc' + fcfile) and overwrite == False:
            print('\t FC file already exists: ' + fcfile)
        else:
            if os.path.isfile(database + '/fc' + fcfile) and overwrite == True:
                print('Removing ' + fcfile)
                try:
                    os.remove(database + '/fc' + fcfile)
                except:
                    pass
            infiles = ''
            for basename in basenames:
                infiles = infiles + basename + '.ch%02d,' % i
            iraf.fitcoord(infiles[:len(infiles) - 1], fitname=fcfile, interactive='yes',
                          combine=combine, database=database, deletions='',
                          function=function, xorder=xorder, yorder=yorder,
                          logfiles=logfiles, graphics='stdgraph', cursor='')
    print('Going back to the original directory.')
    os.chdir('..')
    return
def set_airmass(fn):
    fit = pyfits.open(fn)
    size = len(fit)
    for i, hdu in enumerate(fit):
        if 'AIRMASS' in hdu.header:
            airmassold = hdu.header['AIRMASS']
            print('%s[%d] airmassold = %f' % (fn, i, airmassold))
            if 'AIROLD' in hdu.header:
                airold = hdu.header['AIROLD']
                print('%s[%d] AIROLD = %f' % (fn, i, airold))
                print('AIROLD keyword already exists, the old airmass will not be saved')
            else:
                iraf.hedit(images=fn + '[%d]' % i, fields='AIROLD', value=airmassold,
                           add='Yes', addonly='Yes', delete='No', verify='No',
                           show='Yes', update='Yes')
    fit.close()
    ra, dec = get_ra_dec(fn)
    set_ra_dec(fn, ra, dec)
    iraf.twodspec()
    iraf.longslit(dispaxis=2, nsum=1, observatory='ca', caldir=stdpath)
    for i in range(size):
        iraf.setairmass(images=fn, observatory='ca', intype='beginning',
                        outtype='effective', ra='ra', dec='dec', equinox='epoch',
                        st='lst', ut='date-obs', date='date-obs', exposure='exptime',
                        airmass='airmass', utmiddle='utmiddle', scale=750.0,
                        show='yes', override='yes', update='yes')
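# A hedged batch-driver sketch (the glob pattern is hypothetical): apply the airmass
# bookkeeping and setairmass update to every calibrated frame in the working directory.
import glob

for fn in glob.glob('wftbo*.fits'):
    set_airmass(fn)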
def FitcoordsTask(self, ArcFile, Fits_Folder):
    iraf.noao(_doprint=0)
    iraf.twodspec(_doprint=0)
    iraf.longslit(_doprint=0)
    FitcoordsConf = self.FitcoordsAttributes(ArcFile, Fits_Folder)
    # Display the equivalent command in IRAF
    Command = self.printIrafCommand('fitcoords', FitcoordsConf)
    print '--- Using the command'
    print Command
    iraf.twodspec.longslit.fitcoords(**FitcoordsConf)
    return
def ReidentifyTask(self, ArcFile, Fits_Folder):
    iraf.noao(_doprint=0)
    iraf.twodspec(_doprint=0)
    iraf.longslit(_doprint=0)
    ReidentifyConf = self.ReidentifyAttributes(ArcFile, Fits_Folder)
    # Display the equivalent command in IRAF
    Command = self.printIrafCommand('reidentify', ReidentifyConf)
    print '--- Using the command'
    print Command
    iraf.twodspec.longslit.reidentify(**ReidentifyConf)
    return
def identify_gap(infile, overwrite=False):
    print('\n#############################')
    print('Identifying the spectrum gaps.')
    database = 'database'
    idfile = database + '/id' + os.path.splitext(infile)[0]
    if os.path.exists(idfile):
        if overwrite:
            try:
                os.remove(idfile)
            except:
                pass
        else:
            print('\t ID file already exists: ' + idfile)
            print('\t This procedure is skipped.')
            return
    # Checking version consistency
    if not fi.check_version_f(infile):
        return
    # Not to display items in IRAF packages
    sys.stdout = open('/dev/null', 'w')
    iraf.noao()
    iraf.twodspec()
    iraf.longslit()
    sys.stdout = sys.__stdout__  # Back to the standard output
    binfct1 = fits.getval(infile, 'BIN-FCT1')
    coordlist = fi.filibdir + 'pseudoslitgap_binx' + str(binfct1) + '.dat'
    iraf.identify(infile, section='middle line', database=database, coordlist=coordlist,
                  units='', nsum=20, match=-15., ftype='absorption', fwidth=16. / binfct1,
                  cradius=5., threshold=0., function='chebyshev', order=2, sample='*',
                  niter=0, autowrite='no')
    iraf.reidentify(infile, infile, interac='no', nsum=50, section='middle line',
                    newaps='no', override='no', refit='yes', trace='yes', step=100,
                    shift=0, nlost=20, cradius=5., threshold=0., addfeatures='no',
                    coordlist=coordlist, match=-3., database=database,
                    logfile='identify_gap.log', plotfile='', verbose='yes', cursor='')
    return
def calibrate():
    namelst = [i.split('\n')[0] for i in file(targetoutput)]
    for i in namelst:
        iraf.twodspec()
        iraf.longslit(dispaxis=2, nsum=1, observatory='Lijiang',
                      extinction='onedstds$LJextinct.dat', caldir='onedstds$ctiocal/')
        iraf.calibrate(input=i, output=i.split('.')[0] + 'f.fits', extinct='yes',
                       flux='yes', extinction='onedstds$LJextinct.dat', ignoreaps='yes',
                       sensitivity='sens', fnu='no')
def calibrate(lstfile):
    stdpath = os.path.split(os.path.realpath(__file__))[0] + os.sep + 'standarddir' + os.sep
    extpath = os.path.split(os.path.realpath(__file__))[0] + os.sep + 'LJextinct.dat'
    iraf.noao()
    iraf.twodspec()
    iraf.longslit(dispaxis=2, nsum=1, observatory='Lijiang', extinction=extpath,
                  caldir=stdpath)
    f = open(lstfile)
    l = f.readlines()
    f.close()
    l = [tmp.split('\n')[0] for tmp in l]
    for fitname in l:
        stdobjname = select_std(fitname)
        stdfitname = 'sensawftbo' + stdgroup[stdobjname][0]
        iraf.calibrate(input='awftbo' + fitname, output='mark_awftbo' + fitname,
                       extinct='yes', flux='yes', extinction=extpath, ignoreaps='yes',
                       sensitivity=stdfitname, fnu='no')
        iraf.splot(images='mark_awftbo' + fitname)
def identify_each(inname, database='database', coordlist=fi.filibdir + 'thar.300.dat',
                  section_x=50, overwrite=False):
    # Not to display items in IRAF packages
    sys.stdout = open('/dev/null', 'w')
    iraf.noao()
    iraf.twodspec()
    iraf.longslit()
    sys.stdout = sys.__stdout__  # Back to the standard output
    idfile = database + '/id' + inname
    if os.path.isfile(idfile) and overwrite == False:
        print('ID file already exists. ' + idfile)
    else:
        if os.path.isfile(idfile) and overwrite == True:
            print('Removing ' + idfile)
            try:
                os.remove(idfile)
            except:
                pass
        # Creating the "section" parameter
        section = 'y ' + str(section_x)
        iraf.identify(inname, section=section, database=database, coordlist=coordlist,
                      units='', nsum=nsum, match=match, ftype='emission', fwidth=fwidth,
                      cradius=cradius, threshold=threshold, function='chebyshev',
                      order=order, sample='*', niter=niter, autowrite=autowrite, cursor='')
        iraf.reidentify(inname, inname, interac='no', section=section, newaps=newaps,
                        override=override, refit=refit, trace=trace, step=step, shift=0,
                        nlost=nlost, cradius=cradius, threshold=threshold,
                        addfeatures=addfeatures, coordlist=coordlist, match=match,
                        database=database, logfile=logfile, plotfile='', verbose=verbose,
                        cursor='')
    return
def TransformTask(self, InputFile, OutputFile, Fits_Folder, ArcFile, Suffix='a'):
    iraf.noao(_doprint=0)
    iraf.twodspec(_doprint=0)
    iraf.longslit(_doprint=0)
    # In case no output name is given, we generate one with the provided suffix
    # (the default format is a_std_wolf.dat)
    if OutputFile == None:
        OutputFile = self.outputNameGenerator(InputFile, Suffix)
    TransConf = self.TransformAttributes(InputFile, OutputFile, Fits_Folder, ArcFile)
    # Display the equivalent command in IRAF
    Command = self.printIrafCommand('transform', TransConf)
    print '--- Using the command'
    print Command
    iraf.twodspec.longslit.transform(**TransConf)
    return OutputFile
def combine_flat(filename):
    outname = filename.replace('.lst', '.fits')
    print 'run function flatcombine...'
    print 'make file', outname
    iraf.flatcombine(input='tbo//@' + filename, output=outname, combine='average',
                     reject='avsigclip', ccdtype='', process=False, subsets=True,
                     delete=False, clobber=False, scale='mode', statsec='', nlow=1,
                     nhigh=1, nkeep=1, mclip=True, lsigma=3.0, hsigma=3.0,
                     rdnoise=9.4, gain=0.35, snoise=0.0, pclip=-0.5, blank=1.0)
    iraf.noao()
    iraf.twodspec()
    iraf.longslit()
    print 'run function response...'
    print 'make file', 're' + outname
    iraf.response(calibration=outname, normalization=outname, response='re' + outname,
                  interactive=True, threshold='INDEF', sample='*', naverage=1,
                  function='spline3', order=7, low_reject=0.0, high_reject=0.0,
                  niterate=1, grow=0.0, graphics='stdgraph', cursor='')
    print 'run function illumination...'
    print 'make file', 'il' + outname
    iraf.illumination(images='re' + outname, illuminations='il' + outname,
                      interactive=False, bins='', nbins=5, sample='*', naverage=1,
                      function='spline3', order=1, low_reject=0.0, high_reject=0.0,
                      niterate=1, grow=0.0, interpolator='poly3', graphics='stdgraph',
                      cursor='')
    print 'run function imarith...'
    print 'make file', 'per' + outname
    iraf.imarith(operand1='re' + outname, op='/', operand2='il' + outname,
                 result='per' + outname, title='', divzero=0.0, hparams='', pixtype='',
                 calctype='', verbose=True, noact=False)
    return outname, 're' + outname, 'il' + outname, 'per' + outname
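# A hedged example call (the list-file name is hypothetical): the function returns the
# combined flat, the response image ('re'), the illumination image ('il'), and the
# response divided by the illumination ('per'), in that order.
flat, resp, illum, resp_over_illum = combine_flat('flat.lst')
print 'flat-field products:', flat, resp, illum, resp_over_illum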
def standard():
    f = open(standoutput)
    l = f.readlines()
    f.close()
    namelst = [i.split('\n')[0] for i in l]
    temp = ''
    for i in namelst:
        temp = temp + i + ','
    temp = temp[0:-1]
    # for i in xrange(len(namelst)):
    #     iraf.hselect(images = namelst[i], fields = '$I,object', expr = 'yes')
    #     standname = raw_input('please input standard star name:')
    #     print 'standard star name:', standname
    iraf.twodspec()
    iraf.longslit(dispaxis=2, nsum=1, observatory='Lijiang',
                  extinction='onedstds$LJextinct.dat', caldir='onedstds$ctiocal/')
    for i in xrange(len(namelst)):
        print '+' * 10, namelst[i]
        iraf.hselect(images=namelst[i], fields='$I,object', expr='yes')
        standname = raw_input('please input standard star name:')
        print 'standard star name:', standname
        # iraf.standard(input = namelst[i], output = namelst[i].split('.')[0] + '.std',
        #               samestar = 'yes', interact = 'yes', star_name = standname,
        #               airmass = '', exptime = '', extinction = 'onedstds$LJextinct.dat',
        #               caldir = 'onedstds$ctiocal/')
        iraf.standard(input=namelst[i], output='std', samestar='yes', interact='yes',
                      star_name=standname, airmass='', exptime='',
                      extinction='onedstds$LJextinct.dat', caldir='onedstds$ctiocal/')
def combine_flat(lstfile):
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    iraf.flatcombine(input='tbo//@' + lstfile, output='Halogen', combine='average',
                     reject='crreject', ccdtype='', process=False, subsets=False,
                     delete=False, clobber=False, scale='mode', statsec='', nlow=1,
                     nhigh=1, nkeep=1, mclip=True, lsigma=3.0, hsigma=3.0,
                     rdnoise='rdnoise', gain='gain', snoise=0.0, pclip=-0.5, blank=1.0)
    script_path = os.path.split(os.path.realpath(__file__))[0]
    iraf.twodspec()
    iraf.longslit(dispaxis=2, nsum=1, observatory='observatory',
                  extinction=script_path + os.sep + 'LJextinct.dat',
                  caldir=script_path + os.sep + 'standarddir' + os.sep, interp='poly5')
    iraf.response(calibration='Halogen', normalization='Halogen', response='Resp',
                  interactive=True, threshold='INDEF', sample='*', naverage=1,
                  function='spline3', order=25, low_reject=10.0, high_reject=10.0,
                  niterate=1, grow=0.0, graphics='stdgraph', cursor='')
def gen_Resp_2016():
    iraf.twodspec()
    iraf.longslit(dispaxis=2, nsum=1, observatory='observatory',
                  extinction=func.extinction_file, caldir=func.std_path + os.sep,
                  interp='poly5')
    iraf.response(calibration='Halogen', normalization='Halogen', response='Resp',
                  interactive=True, threshold='INDEF', sample='*', naverage=1,
                  function='spline3', order=45, low_reject=10.0, high_reject=10.0,
                  niterate=1, grow=0.0, graphics='stdgraph', cursor='')
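# A hedged follow-up sketch (the frame names are hypothetical and this step is not part
# of the original function): once 'Resp' exists, a trimmed, bias-corrected frame is
# typically divided by it with imarith to apply the response correction.
from pyraf import iraf

gen_Resp_2016()
iraf.imarith(operand1='tboobj001.fits', op='/', operand2='Resp', result='ftboobj001.fits')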
def floydsautoredu(files,_interactive,_dobias,_doflat,_listflat,_listbias,_listarc,_cosmic,_ext_trace,_dispersionline,liststandard,listatmo,_automaticex,_classify=False,_verbose=False,smooth=1,fringing=1): import floyds import string,re,os,glob,sys,pickle from numpy import array, arange, mean,pi,arccos,sin,cos,argmin from astropy.io import fits from pyraf import iraf import datetime os.environ["PYRAF_BETA_STATUS"] = "1" iraf.set(direc=floyds.__path__[0]+'/') _extinctdir='direc$standard/extinction/' _tel=floyds.util.readkey3(floyds.util.readhdr(re.sub('\n','',files[0])),'TELID') if _tel=='fts': _extinction='ssoextinct.dat' _observatory='sso' elif _tel=='ftn': _extinction='maua.dat' _observatory='cfht' else: sys.exit('ERROR: observatory not recognised') dv=floyds.util.dvex() scal=pi/180. iraf.noao(_doprint=0) iraf.imred(_doprint=0) iraf.ccdred(_doprint=0) iraf.twodspec(_doprint=0) iraf.longslit(_doprint=0) iraf.specred(_doprint=0) toforget = ['ccdred.flatcombine','ccdred.zerocombine','ccdproc','specred.apall','longslit.identify','longslit.reidentify',\ 'specred.standard','longslit.fitcoords','specred.transform','specred.response'] for t in toforget: iraf.unlearn(t) iraf.longslit.dispaxi=2 iraf.longslit.mode='h' iraf.identify.fwidth=7 iraf.identify.order=2 iraf.specred.dispaxi=2 iraf.specred.mode='h' iraf.ccdproc.darkcor='no' iraf.ccdproc.fixpix='no' iraf.ccdproc.trim='no' iraf.ccdproc.flatcor='no' iraf.ccdproc.overscan='no' iraf.ccdproc.zerocor='no' iraf.ccdproc.biassec='' iraf.ccdproc.ccdtype='' iraf.ccdred.instrument = "/dev/null" if _verbose: iraf.ccdred.verbose='yes' iraf.specred.verbose='yes' else: iraf.specred.verbose='no' iraf.ccdred.verbose='no' now=datetime.datetime.now() datenow=now.strftime('20%y%m%d%H%M') MJDtoday=55928+(datetime.date.today()-datetime.date(2012, 01, 01)).days outputlist=[] hdra=floyds.util.readhdr(re.sub('\n','',files[0])) _gain=floyds.util.readkey3(hdra,'gain') _rdnoise=floyds.util.readkey3(hdra,'ron') std,rastd,decstd,magstd=floyds.util.readstandard('standard_floyds_mab.txt') _naxis2=hdra.get('NAXIS2') _naxis1=hdra.get('NAXIS1') if not _naxis1: _naxis1=2079 if not _naxis2: if not hdr0.get('HDRVER'): _naxis1=511 else: _naxis1=512 _overscan='[2049:'+str(_naxis1)+',1:'+str(_naxis2)+']' _biassecblu='[380:2048,325:'+str(_naxis2)+']' _biassecred='[1:1800,1:350]' lista={} objectlist={} biaslist={} flatlist={} flatlistd={} arclist={} max_length=14 for img in files: hdr0=floyds.util.readhdr(img) if floyds.util.readkey3(hdr0,'naxis2')>=500: if 'blu' not in lista: lista['blu']=[] if 'red' not in lista: lista['red']=[] _object0=floyds.util.readkey3(hdr0,'object') _object0 = re.sub(':', '', _object0) # colon _object0 = re.sub('/', '', _object0) # slash _object0 = re.sub('\s', '', _object0) # any whitespace _object0 = re.sub('\(', '', _object0) # open parenthesis _object0 = re.sub('\[', '', _object0) # open square bracket _object0 = re.sub('\)', '', _object0) # close parenthesis _object0 = re.sub('\]', '', _object0) # close square bracket _object0 = _object0.replace(r'\t', '') # Any tab characters _object0 = _object0.replace('*', '') # Any asterisks if len(_object0) > max_length: _object0 = _object0[:max_length] _date0=floyds.util.readkey3(hdr0,'date-night') _tel=floyds.util.readkey3(hdr0,'TELID') _type=floyds.util.readkey3(hdr0,'OBSTYPE') if not _type: _type=floyds.util.readkey3(hdr0,'imagetyp') _slit=floyds.util.readkey3(hdr0,'slit') if _type: _type = _type.lower() if _type in ['sky','spectrum','expose']: 
nameoutb=str(_object0)+'_'+_tel+'_'+str(_date0)+'_blue_'+str(_slit)+'_'+str(MJDtoday) nameoutr=str(_object0)+'_'+_tel+'_'+str(_date0)+'_red_'+str(_slit)+'_'+str(MJDtoday) elif _type in ['lamp','arc','l']: nameoutb='arc_'+str(_object0)+'_'+_tel+'_'+str(_date0)+'_blue_'+str(_slit)+'_'+str(MJDtoday) nameoutr='arc_'+str(_object0)+'_'+_tel+'_'+str(_date0)+'_red_'+str(_slit)+'_'+str(MJDtoday) elif _type in ['flat','f','lampflat','lamp-flat']: nameoutb='flat_'+str(_object0)+'_'+_tel+'_'+str(_date0)+'_blue_'+str(_slit)+'_'+str(MJDtoday) nameoutr='flat_'+str(_object0)+'_'+_tel+'_'+str(_date0)+'_red_'+str(_slit)+'_'+str(MJDtoday) else: nameoutb=str(_type.lower())+'_'+str(_object0)+'_'+_tel+'_'+str(_date0)+'_blue_'+str(_slit)+'_'+str(MJDtoday) nameoutr=str(_type.lower())+'_'+str(_object0)+'_'+_tel+'_'+str(_date0)+'_red_'+str(_slit)+'_'+str(MJDtoday) bimg=floyds.util.name_duplicate(img,nameoutb,'') rimg=floyds.util.name_duplicate(img,nameoutr,'') #### floyds.util.delete(bimg) floyds.util.delete(rimg) iraf.imcopy(img,bimg,verbose='no') iraf.imcopy(img,rimg,verbose='no') aaa=iraf.hedit(bimg,'CCDSEC',delete='yes',update='yes',verify='no',Stdout=1) aaa=iraf.hedit(bimg,'TRIMSEC',delete='yes',update='yes',verify='no',Stdout=1) aaa=iraf.hedit(rimg,'CCDSEC',delete='yes',update='yes',verify='no',Stdout=1) aaa=iraf.hedit(rimg,'TRIMSEC',delete='yes',update='yes',verify='no',Stdout=1) iraf.ccdproc(bimg,output='', overscan="yes", trim="yes", zerocor='no', flatcor='no', zero='', ccdtype='',\ fixpix='no', trimsec=_biassecblu, biassec=_overscan, readaxi='line', Stdout=1) iraf.ccdproc(rimg,output='', overscan="yes", trim="yes", zerocor='no', flatcor='no', zero='', ccdtype='',\ fixpix='no', trimsec=_biassecred, biassec=_overscan, readaxi='line', Stdout=1) floyds.util.updateheader(bimg,0,{'GRISM':['blu',' blue order']}) floyds.util.updateheader(rimg,0,{'GRISM':['red',' blue order']}) floyds.util.updateheader(bimg,0,{'arcfile':[img,'file name in the archive']}) floyds.util.updateheader(rimg,0,{'arcfile':[img,'file name in the archive']}) lista['blu'].append(bimg) lista['red'].append(rimg) else: print 'warning type not defined' for arm in lista.keys(): for img in lista[arm]: print img hdr=floyds.util.readhdr(img) _type=floyds.util.readkey3(hdr,'OBSTYPE') if _type=='EXPOSE': _type=floyds.util.readkey3(hdr,'imagetyp') if not _type: _type='EXPOSE' if _type=='EXPOSE': print 'warning obstype still EXPOSE, are this old data ? run manually floydsfixheader' _slit=floyds.util.readkey3(hdr,'slit') _grpid=floyds.util.readkey3(hdr,'grpid') if _type.lower() in ['flat','f','lamp-flat','lampflat'] : if (arm,_slit) not in flatlist: flatlist[arm,_slit]={} if _grpid not in flatlist[arm,_slit]: flatlist[arm,_slit][_grpid]=[img] else: flatlist[arm,_slit][_grpid].append(img) elif _type.lower() in ['lamp','l','arc']: if (arm,_slit) not in arclist: arclist[arm,_slit]={} if _grpid not in arclist[arm,_slit]: arclist[arm,_slit][_grpid]=[img] else: arclist[arm,_slit][_grpid].append(img) elif _type in ['bias','b']: if arm not in biaslist: biaslist[arm]=[] biaslist[arm].append(img) elif _type.lower() in ['sky','s','spectrum']: try: _ra=float(floyds.util.readkey3(hdr,'RA')) _dec=float(floyds.util.readkey3(hdr,'DEC')) except: ra00=string.split(floyds.util.readkey3(hdr,'RA'),':') ra0,ra1,ra2=float(ra00[0]),float(ra00[1]),float(ra00[2]) _ra=((ra2/60.+ra1)/60.+ra0)*15. 
dec00=string.split(floyds.util.readkey3(hdr,'DEC'),':') dec0,dec1,dec2=float(dec00[0]),float(dec00[1]),float(dec00[2]) if '-' in str(dec0): _dec=(-1)*((dec2/60.+dec1)/60.+((-1)*dec0)) else: _dec=(dec2/60.+dec1)/60.+dec0 dd=arccos(sin(_dec*scal)*sin(decstd*scal)+cos(_dec*scal)*cos(decstd*scal)*cos((_ra-rastd)*scal))*((180/pi)*3600) if _verbose: print _ra,_dec print std[argmin(dd)],min(dd) if min(dd)<5200: _typeobj='std' else: _typeobj='obj' if min(dd)<5200: floyds.util.updateheader(img,0,{'stdname':[std[argmin(dd)],'']}) floyds.util.updateheader(img,0,{'magstd':[float(magstd[argmin(dd)]),'']}) if _typeobj not in objectlist: objectlist[_typeobj]={} if (arm,_slit) not in objectlist[_typeobj]: objectlist[_typeobj][arm,_slit]=[img] else: objectlist[_typeobj][arm,_slit].append(img) if _verbose: print 'object' print objectlist print 'flat' print flatlist print 'bias' print biaslist print 'arc' print arclist if liststandard and 'std' in objectlist.keys(): print 'external standard, raw standard not used' del objectlist['std'] sens={} outputfile={} atmo={} for tpe in objectlist: if tpe not in outputfile: outputfile[tpe]={} for setup in objectlist[tpe]: if setup not in sens: sens[setup]=[] print '\n### setup= ',setup,'\n### objects= ',objectlist[tpe][setup],'\n' for img in objectlist[tpe][setup]: print '\n\n### next object= ',img,' ',floyds.util.readkey3(floyds.util.readhdr(img),'object'),'\n' hdr=floyds.util.readhdr(img) archfile=floyds.util.readkey3(hdr,'arcfile') _gain=floyds.util.readkey3(hdr,'gain') _rdnoise=floyds.util.readkey3(hdr,'ron') _grism=floyds.util.readkey3(hdr,'grism') _grpid=floyds.util.readkey3(hdr,'grpid') if archfile not in outputfile[tpe]: outputfile[tpe][archfile]=[] ##################### flat ############### if _listflat: flatgood=_listflat # flat list from reducer elif setup in flatlist: if _grpid in flatlist[setup]: print '\n###FLAT WITH SAME GRPID' flatgood= flatlist[setup][_grpid] # flat in the raw data else: flatgood=[] for _grpid0 in flatlist[setup].keys(): for ii in flatlist[setup][_grpid0]: flatgood.append(ii) else: flatgood=[] if len(flatgood)!=0: if len(flatgood)>1: f=open('_oflatlist','w') for fimg in flatgood: print fimg f.write(fimg+'\n') f.close() floyds.util.delete('flat'+img) iraf.ccdred.flatcombine('"@_oflatlist"',output='flat'+img,combine='average',reject='none',ccdtype=' ',rdnoise=_rdnoise,gain=_gain, process='no', Stdout=1) floyds.util.delete('_oflatlist') flatfile='flat'+img elif len(flatgood)==1: os.system('cp '+flatgood[0]+' flat'+img) flatfile='flat'+img else: flatfile='' ########################## find arcfile ####################### arcfile='' if _listarc: arcfile= [floyds.util.searcharc(img,_listarc)[0]][0] # take arc from list if not arcfile and setup in arclist.keys(): if _grpid in arclist[setup]: print '\n###ARC WITH SAME GRPID' arcfile= arclist[setup][_grpid] # flat in the raw data else: arcfile=[] for _grpid0 in arclist[setup].keys(): for ii in arclist[setup][_grpid0]: arcfile.append(ii) if arcfile: if len(arcfile)>1: # more than one arc available print arcfile # _arcclose=floyds.util.searcharc(imgex,arcfile)[0] # take the closest in time _arcclose=floyds.sortbyJD(arcfile)[-1] # take the last arc of the sequence if _interactive.upper() in ['YES','Y']: for ii in floyds.floydsspecdef.sortbyJD(arcfile): print '\n### ',ii arcfile=raw_input('\n### more than one arcfile available, which one to use ['+str(_arcclose)+'] ? 
') if not arcfile: arcfile=_arcclose else: arcfile=_arcclose else: arcfile=arcfile[0] else: print '\n### Warning: no arc found' ################################################################### rectify if setup[0]=='red': fcfile=floyds.__path__[0]+'/standard/ident/fcrectify_'+_tel+'_red' fcfile1=floyds.__path__[0]+'/standard/ident/fcrectify1_'+_tel+'_red' print fcfile else: fcfile=floyds.__path__[0]+'/standard/ident/fcrectify_'+_tel+'_blue' fcfile1=floyds.__path__[0]+'/standard/ident/fcrectify1_'+_tel+'_blue' print fcfile print img,arcfile,flatfile img0=img if img and img not in outputfile[tpe][archfile]: outputfile[tpe][archfile].append(img) if arcfile and arcfile not in outputfile[tpe][archfile]: outputfile[tpe][archfile].append(arcfile) if flatfile and flatfile not in outputfile[tpe][archfile]: outputfile[tpe][archfile].append(flatfile) img,arcfile,flatfile=floyds.floydsspecdef.rectifyspectrum(img,arcfile,flatfile,fcfile,fcfile1,'no',_cosmic) if img and img not in outputfile[tpe][archfile]: outputfile[tpe][archfile].append(img) if arcfile and arcfile not in outputfile[tpe][archfile]: outputfile[tpe][archfile].append(arcfile) if flatfile and flatfile not in outputfile[tpe][archfile]: outputfile[tpe][archfile].append(flatfile) ################################################################### check wavecalib if tpe=='std' or floyds.util.readkey3(floyds.util.readhdr(img),'exptime') < 300: if setup[0]=='red': print '\n### check standard wave calib' data, hdr = fits.getdata(img, 0, header=True) y=data.mean(1) import numpy as np if np.argmax(y) < 80 and np.argmax(y) > 15: y2=data[np.argmax(y)-3:np.argmax(y)+3].mean(0) yy2=data[np.argmax(y)-9:np.argmax(y)-3].mean(0) floyds.util.delete('_std.fits') fits.writeto('_std.fits', np.float32(y2-yy2), hdr) shift=floyds.floydsspecdef.checkwavestd('_std.fits',_interactive,2) zro=hdr['CRVAL1'] floyds.util.updateheader(img,0,{'CRVAL1':[zro+int(shift),'']}) floyds.util.updateheader(img,0,{'shift':[float(shift),'']}) floyds.util.delete('_std.fits') else: print 'object not found' else: print '\n### warning check in wavelength not possible for short exposure in the blu range ' else: print '\n### check object wave calib' _skyfile=floyds.__path__[0]+'/standard/ident/sky_'+setup[0]+'.fits' data, hdr = fits.getdata(img, 0, header=True) y=data.mean(1) import numpy as np if np.argmax(y) < 80 and np.argmax(y) > 15: yy1=data[10:np.argmax(y)-9].mean(0) yy2=data[np.argmax(y)+9:-10].mean(0) floyds.util.delete('_sky.fits') fits.writeto('_sky.fits', np.float32(yy1+yy2), hdr) shift=floyds.floydsspecdef.checkwavelength_obj('_sky.fits',_skyfile,_interactive,usethirdlayer=False) floyds.util.delete('_sky.fits') zro=hdr['CRVAL1'] floyds.util.updateheader(img,0,{'CRVAL1':[zro+int(shift),'']}) floyds.util.updateheader(img,0,{'shift':[float(shift),'']}) else: print 'object not found' #################################################### flat field if img and flatfile and setup[0]=='red': imgn='n'+img hdr1 = floyds.readhdr(img) hdr2 = floyds.readhdr(flatfile) _grpid1=floyds.util.readkey3(hdr1,'grpid') _grpid2=floyds.util.readkey3(hdr2,'grpid') if _grpid1==_grpid2: print flatfile,img,setup[0] imgn=floyds.fringing_classicmethod2(flatfile,img,'no','*',15,setup[0]) else: print 'Warning flat not the same OB' imgex=floyds.floydsspecdef.extractspectrum(img,dv,_ext_trace,_dispersionline,_interactive,tpe,automaticex=_automaticex) floyds.delete('flat'+imgex) iraf.specred.apsum(flatfile,output='flat'+imgex,referen=img,interac='no',find='no',recente='no',resize='no',\ 
edit='no',trace='no',fittrac='no',extract='yes',extras='no',review='no',backgro='none') fringingmask=floyds.normflat('flat'+imgex) print '\n### fringing correction' print imgex,fringingmask imgex,scale,shift=floyds.correctfringing_auto(imgex,fringingmask) # automatic correction shift=int(.5+float(shift)/3.5) # shift from correctfringing_auto in Angstrom print '\n##### flat scaling: ',str(scale),str(shift) ######################################################## datax, hdrx = fits.getdata(flatfile, 0, header=True) xdim=hdrx['NAXIS1'] ydim=hdrx['NAXIS2'] iraf.specred.apedit.nsum=15 iraf.specred.apedit.width=100. iraf.specred.apedit.line=1024 iraf.specred.apfind.minsep=20. iraf.specred.apfind.maxsep=1000. iraf.specred.apresize.bkg='no' iraf.specred.apresize.ylevel=0.5 iraf.specred.aptrace.nsum=10 iraf.specred.aptrace.step=10 iraf.specred.aptrace.nlost=10 floyds.util.delete('n'+flatfile) floyds.util.delete('norm.fits') floyds.util.delete('n'+img) floyds.util.delete(re.sub('.fits','c.fits',flatfile)) iraf.imcopy(flatfile+'[500:'+str(xdim)+',*]',re.sub('.fits','c.fits',flatfile),verbose='no') iraf.imarith(flatfile,'/',flatfile,'norm.fits',verbose='no') flatfile=re.sub('.fits','c.fits',flatfile) floyds.util.delete('n'+flatfile) iraf.unlearn(iraf.specred.apflatten) floyds.floydsspecdef.aperture(flatfile) iraf.specred.apflatten(flatfile,output='n'+flatfile,interac=_interactive,find='no',recenter='no', resize='no',edit='no',trace='no',\ fittrac='no',fitspec='no', flatten='yes', aperture='',\ pfit='fit2d',clean='no',function='legendre',order=15,sample = '*', mode='ql') iraf.imcopy('n'+flatfile,'norm.fits[500:'+str(xdim)+',*]',verbose='no') floyds.util.delete('n'+flatfile) floyds.util.delete('n'+img) iraf.imrename('norm.fits','n'+flatfile,verbose='no') imgn=floyds.floydsspecdef.applyflat(img,'n'+flatfile,'n'+img,scale,shift) else: imgn='' if imgn and imgn not in outputfile[tpe][archfile]: outputfile[tpe][archfile].append(imgn) ################################################### 2D flux calib hdr=floyds.util.readhdr(img) _sens='' if liststandard: _sens=floyds.util.searchsens(img,liststandard)[0] # search in the list from reducer if not _sens: try: _sens=floyds.util.searchsens(img,sens[setup])[0] # search in the reduced data except: _sens=floyds.util.searchsens(img,'')[0] # search in tha archive if _sens: if _sens[0]=='/': os.system('cp '+_sens+' .') _sens=string.split(_sens,'/')[-1] imgd=fluxcalib2d(img,_sens) if imgn: imgdn=fluxcalib2d(imgn,_sens) else: imgdn='' if _sens not in outputfile[tpe][archfile]: outputfile[tpe][archfile].append(_sens) else: imgdn='' print '\n### do 2D calibration' else: imgd='' imgdn='' ################ extraction #################################### if imgdn: try: imgdnex=floyds.floydsspecdef.extractspectrum(imgdn,dv,_ext_trace,_dispersionline,_interactive,tpe,automaticex=_automaticex) except Exception as e: print 'failed to extract', imgdn print e imgdnex='' else: imgdnex='' if imgd: try: imgdex=floyds.floydsspecdef.extractspectrum(imgd,dv,_ext_trace,_dispersionline,_interactive,tpe,automaticex=_automaticex) except Exception as e: print 'failed to extract', imgd print e imgdex='' else: imgdex='' if imgd and imgd not in outputfile[tpe][archfile]: outputfile[tpe][archfile].append(imgd) if imgdn and imgdn not in outputfile[tpe][archfile]: outputfile[tpe][archfile].append(imgdn) if imgdnex and imgdnex not in outputfile[tpe][archfile]: outputfile[tpe][archfile].append(imgdnex) if imgdex and imgdex not in outputfile[tpe][archfile]: outputfile[tpe][archfile].append(imgdex) if 
tpe=='std': if imgn: try: imgnex=floyds.floydsspecdef.extractspectrum(imgn,dv,_ext_trace,_dispersionline,_interactive,tpe,automaticex=_automaticex) except Exception as e: print 'failed to extract', imgn print e imgnex='' elif img: try: imgnex=floyds.floydsspecdef.extractspectrum(img,dv,_ext_trace,_dispersionline,_interactive,tpe,automaticex=_automaticex) except Exception as e: print 'failed to extract', img print e imgnex='' if imgnex: hdrs=floyds.util.readhdr(imgnex) _tel=floyds.util.readkey3(hdrs,'TELID') try: _outputsens2='sens_'+_tel+'_'+str(floyds.util.readkey3(hdrs,'date-night'))+'_'+str(floyds.util.readkey3(hdrs,'grism'))+\ '_'+re.sub('.dat','',floyds.util.readkey3(hdrs,'stdname'))+'_'+str(MJDtoday) except: sys.exit('Error: missing header -stdname- in standard '+str(standardfile)+' ') print '\n### compute sensitivity function and atmofile' if setup[0]=='red': atmofile=floyds.floydsspecdef.telluric_atmo(imgnex) if atmofile and atmofile not in outputfile[tpe][archfile]: outputfile[tpe][archfile].append(atmofile) stdusedclean=re.sub('_ex.fits','_clean.fits',imgnex) floyds.util.delete(stdusedclean) _function='spline3' iraf.specred.sarith(input1=imgnex,op='/',input2=atmofile,output=stdusedclean, format='multispec') try: _outputsens2=floyds.floydsspecdef.sensfunction(stdusedclean,_outputsens2,_function,8,_interactive) except: print 'Warning: problem computing sensitivity function' _outputsens2='' if setup not in atmo: atmo[setup]=[atmofile] else: atmo[setup].append(atmofile) else: _function='spline3' try: _outputsens2=floyds.floydsspecdef.sensfunction(imgnex,_outputsens2,_function,12,_interactive,'3400:4700')#,3600:4300') except: print 'Warning: problem computing sensitivity function' _outputsens2='' if _outputsens2 and _outputsens2 not in outputfile[tpe][archfile]: outputfile[tpe][archfile].append(_outputsens2) ################################################### if 'obj' in outputfile: for imm in outputfile['obj']: lista = [] tt_red = '' ntt_red = '' tt_blue = '' for f in outputfile['obj'][imm]: if '_ex.fits' in f and '_blue_' in f: tt_blue = f elif '_ex.fits' in f and f[:3] == 'ntt': ntt_red = f elif '_ex.fits' in f and f[:2] == 'tt': tt_red = f else: lista.append(f) merged = ntt_red.replace('_red_', '_merge_') if tt_blue and ntt_red: floyds.floydsspecdef.combspec2(tt_blue, ntt_red, merged, scale=True, num=None) if os.path.isfile(merged): lista.append(merged) floyds.util.delete(tt_blue) floyds.util.delete(tt_red) floyds.util.delete(ntt_red) else: if tt_blue: lista.append(tt_blue) if tt_red: lista.append(tt_red) if ntt_red: lista.append(ntt_red) outputfile['obj'][imm] = lista readme=floyds.floydsspecauto.writereadme() return outputfile,readme
global iraf
from pyraf import iraf
import numpy as np
import pyfits
from glob import glob
import os

iraf.pysalt()
iraf.saltspec()
iraf.saltred()
iraf.set(clobber='YES')
iraf.noao()
iraf.twodspec()
iraf.longslit()


def tofits(filename, data, hdr=None, clobber=False):
    """simple pyfits wrapper to make saving fits files easier."""
    from pyfits import PrimaryHDU, HDUList
    hdu = PrimaryHDU(data)
    if hdr is not None:
        hdu.header = hdr
    hdulist = HDUList([hdu])
    hdulist.writeto(filename, clobber=clobber, output_verify='ignore')


def get_ims(fs, imtype):
    imtypekeys = {'sci': 'OBJECT', 'arc': 'ARC', 'flat': 'FLAT'}
    ims = []
    grangles = []
    for f in fs:
def efoscfastredu(imglist, _listsens, _listarc, _ext_trace, _dispersionline, _cosmic, _interactive): # print "LOGX:: Entering `efoscfastredu` method/function in %(__file__)s" # % globals() import string import os import re import sys os.environ["PYRAF_BETA_STATUS"] = "1" try: from astropy.io import fits as pyfits except: import pyfits from ntt.util import readhdr, readkey3 import ntt import numpy as np dv = ntt.dvex() scal = np.pi / 180. if not _interactive: _interactive = False _inter = 'NO' else: _inter = 'YES' from pyraf import iraf iraf.noao(_doprint=0, Stdout=0) iraf.imred(_doprint=0, Stdout=0) iraf.ccdred(_doprint=0, Stdout=0) iraf.twodspec(_doprint=0, Stdout=0) iraf.longslit(_doprint=0, Stdout=0) iraf.onedspec(_doprint=0, Stdout=0) iraf.specred(_doprint=0, Stdout=0) toforget = [ 'ccdproc', 'imcopy', 'specred.apall', 'longslit.identify', 'longslit.reidentify', 'specred.standard', 'longslit.fitcoords', 'onedspec.wspectext' ] for t in toforget: iraf.unlearn(t) iraf.ccdred.verbose = 'no' # not print steps iraf.specred.verbose = 'no' # not print steps iraf.ccdproc.darkcor = 'no' iraf.ccdproc.fixpix = 'no' iraf.ccdproc.flatcor = 'no' iraf.ccdproc.zerocor = 'no' iraf.ccdproc.ccdtype = '' _gain = ntt.util.readkey3(ntt.util.readhdr(imglist[0]), 'gain') _ron = ntt.util.readkey3(ntt.util.readhdr(imglist[0]), 'ron') iraf.specred.apall.readnoi = _ron iraf.specred.apall.gain = _gain iraf.specred.dispaxi = 2 iraf.longslit.dispaxi = 2 iraf.longslit.mode = 'h' iraf.specred.mode = 'h' iraf.noao.mode = 'h' iraf.ccdred.instrument = "ccddb$kpno/camera.dat" iraf.set(direc=ntt.__path__[0] + '/') for img in imglist: hdr = ntt.util.readhdr(img) _tech = ntt.util.readkey3(hdr, 'tech') if _tech != 'SPECTRUM': sys.exit('error: ' + str(img) + ' is not a spectrum ') print '\n#### image name = ' + img + '\n' _grism0 = readkey3(hdr, 'grism') _filter0 = readkey3(hdr, 'filter') _slit0 = readkey3(hdr, 'slit') _object0 = readkey3(hdr, 'object') _date0 = readkey3(hdr, 'date-night') setup = (_grism0, _filter0, _slit0) _biassec0 = '[3:1010,1026:1029]' if _grism0 == 'Gr16': _trimsec0 = '[100:950,1:950]' elif _grism0 == 'Gr13': if _filter0 == 'Free': _trimsec0 = '[100:950,1:1015]' elif _filter0 == 'GG495': _trimsec0 = '[100:950,208:1015]' elif _filter0 == 'OG530': _trimsec0 = '[100:950,300:1015]' elif _grism0 == 'Gr11': _trimsec0 = '[100:950,5:1015]' else: _trimsec0 = '[100:950,5:1015]' _object0 = re.sub(' ', '', _object0) _object0 = re.sub('/', '_', _object0) nameout0 = 't' + str(_object0) + '_' + str(_date0) for _set in setup: nameout0 = nameout0 + '_' + _set nameout0 = ntt.util.name_duplicate(img, nameout0, '') timg = nameout0 if os.path.isfile(timg): os.system('rm -rf ' + timg) iraf.imcopy(img, output=timg) iraf.ccdproc(timg, output='', overscan='no', trim='yes', zerocor="no", flatcor="no", readaxi='column', trimsec=str(_trimsec0), biassec=_biassec0, Stdout=1) img = timg if _listarc: arcfile = ntt.util.searcharc(img, _listarc)[0] else: arcfile = '' if not arcfile: arcfile = ntt.util.searcharc(img, '')[0] else: iraf.ccdproc(arcfile, output='t' + arcfile, overscan='no', trim='yes', zerocor="no", flatcor="no", readaxi='column', trimsec=str(_trimsec0), biassec=str(_biassec0), Stdout=1) arcfile = 't' + arcfile if _cosmic: # print cosmic rays rejection ntt.cosmics.lacos(img, output='', gain=_gain, readn=_ron, xorder=9, yorder=9, sigclip=4.5, sigfrac=0.5, objlim=1, verbose=True, interactive=False) print '\n### cosmic rays rejections ........ 
done ' if not arcfile: print '\n### warning no arcfile \n exit ' else: arcref = ntt.util.searcharc(img, '')[0] if arcfile[0] == '/': os.system('cp ' + arcfile + ' ' + string.split(arcfile, '/')[-1]) arcfile = string.split(arcfile, '/')[-1] arcref = string.split(arcref, '/')[-1] if arcref: os.system('cp ' + arcref + ' .') arcref = string.split(arcref, '/')[-1] if not os.path.isdir('database/'): os.mkdir('database/') if os.path.isfile( ntt.util.searcharc(img, '')[1] + '/database/id' + re.sub('.fits', '', arcref)): os.system('cp ' + ntt.util.searcharc(img, '')[1] + '/database/id' + re.sub('.fits', '', arcref) + ' database/') iraf.longslit.reidentify( referenc=arcref, images=arcfile, interac=_inter, section='column 10', coordli='direc$standard/ident/Lines_HgCdHeNeAr600.dat', overrid='yes', step=0, newaps='no', nsum=5, nlost=2, mode='h', verbose='no') else: iraf.longslit.identify( images=arcfile, section='column 10', coordli='direc$standard/ident/Lines_HgCdHeNeAr600.dat', nsum=10, fwidth=7, order=3, mode='h') iraf.longslit.reident( referenc=arcfile, images=arcfile, interac='NO', section='column 10', coordli='direc$standard/ident/Lines_HgCdHeNeAr600.dat', overrid='yes', step=10, newaps='yes', nsum=5, nlost=2, mode='h', verbose='no') qqq = iraf.longslit.fitcoords(images=re.sub('.fits', '', arcfile), fitname=re.sub('.fits', '', arcfile), interac='no', combine='yes', databas='database', function='legendre', yorder=4, logfile='logfile', plotfil='', mode='h') iraf.specred.transform(input=img, output=img, minput='', fitnames=re.sub('.fits', '', arcfile), databas='database', x1='INDEF', x2='INDEF', y1='INDEF', y2='INDEF', flux='yes', mode='h', logfile='logfile') # ###################### check wavelength calibration ############ _skyfile = ntt.__path__[0] + '/standard/ident/sky_' + setup[ 0] + '_' + setup[1] + '.fits' shift = ntt.efoscspec2Ddef.skyfrom2d(img, _skyfile) print '\n### check in wavelengh performed ...... spectrum shifted of ' + str( shift) + ' Angstrom \n' zro = pyfits.open(img)[0].header.get('CRVAL2') ntt.util.updateheader(img, 0, {'CRVAL2': [zro + int(shift), '']}) std, rastd, decstd, magstd = ntt.util.readstandard( 'standard_efosc_mab.txt') hdrt = readhdr(img) _ra = readkey3(hdrt, 'RA') _dec = readkey3(hdrt, 'DEC') _object = readkey3(hdrt, 'object') dd = np.arccos( np.sin(_dec * scal) * np.sin(decstd * scal) + np.cos(_dec * scal) * np.cos(decstd * scal) * np.cos( (_ra - rastd) * scal)) * ((180 / np.pi) * 3600) if min(dd) < 100: _type = 'stdsens' ntt.util.updateheader(img, 0, {'stdname': [std[np.argmin(dd)], '']}) ntt.util.updateheader( img, 0, {'magstd': [float(magstd[np.argmin(dd)]), '']}) else: _type = 'obj' print '\n### EXTRACTION USING IRAF TASK APALL \n' result = [] if _type == 'obj': imgex = ntt.util.extractspectrum(img, dv, _ext_trace, _dispersionline, _interactive, _type) ntt.util.updateheader( imgex, 0, {'FILETYPE': [22107, 'extracted 1D spectrum ']}) ntt.util.updateheader( imgex, 0, { 'PRODCATG': [ 'SCIENCE.' 
+ readkey3(readhdr(imgex), 'tech').upper(), 'Data product category' ] }) ntt.util.updateheader(imgex, 0, {'TRACE1': [img, '']}) result.append(imgex) if _listsens: sensfile = ntt.util.searchsens(img, _listsens)[0] else: sensfile = '' if not sensfile: sensfile = ntt.util.searchsens(img, '')[0] if sensfile: imgf = re.sub('.fits', '_f.fits', img) _extinctdir = 'direc$standard/extinction/' _extinction = 'extinction_lasilla.dat' _observatory = 'lasilla' _exptime = readkey3(hdrt, 'exptime') _airmass = readkey3(hdrt, 'airmass') ntt.util.delete(imgf) iraf.specred.calibrate(input=imgex, output=imgf, sensiti=sensfile, extinct='yes', flux='yes', ignorea='yes', extinction=_extinctdir + _extinction, observatory=_observatory, airmass=_airmass, exptime=_exptime, fnu='no') hedvec = { 'SENSFUN': [ string.split(sensfile, '/')[-1], 'sensitivity function' ], 'FILETYPE': [22208, '1D wavelength and flux calibrated spectrum '], 'SNR': [ntt.util.StoN2(imgf, False), 'Average S/N ratio'], 'BUNIT': ['erg/cm2/s/Angstrom', 'Flux Calibration Units'], 'TRACE1': [imgex, ''] } ntt.util.updateheader(imgf, 0, hedvec) imgout = imgf imgd = ntt.efoscspec1Ddef.fluxcalib2d(img, sensfile) ntt.util.updateheader( imgd, 0, { 'FILETYPE': [ 22209, '2D wavelength and flux calibrated spectrum ' ] }) ntt.util.updateheader(imgd, 0, {'TRACE1': [img, '']}) imgasci = re.sub('.fits', '.asci', imgout) ntt.util.delete(imgasci) iraf.onedspec.wspectext(imgout + '[*,1,1]', imgasci, header='no') result = result + [imgout, imgd, imgasci] else: imgex = ntt.util.extractspectrum(img, dv, _ext_trace, _dispersionline, _interactive, 'std') imgout = ntt.efoscspec1Ddef.sensfunction( imgex, 'spline3', 6, _inter) result = result + [imgout] for img in result: if img[-5:] == '.fits': ntt.util.phase3header(img) # phase 3 definitions ntt.util.airmass(img) # phase 3 definitions ntt.util.updateheader( img, 0, {'quality': ['Rapid', 'Final or Rapid reduction']}) return result
def telluric_atmo(imgstd): # print "LOGX:: Entering `telluric_atmo` method/function in %(__file__)s" # % globals() import numpy as np import ntt from pyraf import iraf try: import pyfits except: from astropy.io import fits as pyfits iraf.images(_doprint=0) iraf.noao(_doprint=0) iraf.twodspec(_doprint=0) iraf.longslit(_doprint=0) iraf.onedspec(_doprint=0) toforget = [ 'imfilter.gauss', 'specred.apall', 'longslit.identify', 'longslit.reidentify', 'specred.standard', 'onedspec.wspectext' ] for t in toforget: iraf.unlearn(t) _grism = ntt.util.readkey3(ntt.util.readhdr(imgstd), 'grism') imgout = 'invers_atmo_' + imgstd ntt.util.delete(imgout) iraf.set(direc=ntt.__path__[0] + '/') _cursor = 'direc$standard/ident/cursor_sky_0' iraf.noao.onedspec.bplot(imgstd, cursor=_cursor, spec2=imgstd, new_ima=imgout, overwri='yes') xxstd, ffstd = ntt.util.readspectrum(imgout) if _grism in ['Gr13', 'Gr16']: llo2 = np.compress( (np.array(xxstd) >= 7550) & (np.array(xxstd) <= 7750), np.array(xxstd)) llh2o = np.compress( (np.array(xxstd) >= 7100) & (np.array(xxstd) <= 7500), np.array(xxstd)) ffo2 = np.compress( (np.array(xxstd) >= 7550) & (np.array(xxstd) <= 7750), np.array(ffstd)) ffh2o = np.compress( (np.array(xxstd) >= 7100) & (np.array(xxstd) <= 7500), np.array(ffstd)) elif _grism in ['Gr11']: llo2 = np.compress( (np.array(xxstd) >= 6830) & (np.array(xxstd) <= 7100), np.array(xxstd)) llh2o = np.compress( (np.array(xxstd) >= 7100) & (np.array(xxstd) <= 7500), np.array(xxstd)) ffo2 = np.compress( (np.array(xxstd) >= 6830) & (np.array(xxstd) <= 7100), np.array(ffstd)) ffh2o = np.compress( (np.array(xxstd) >= 7100) & (np.array(xxstd) <= 7500), np.array(ffstd)) if _grism in ['Gr13', 'Gr16', 'Gr11']: _skyfileh2o = 'direc$standard/ident/ATLAS_H2O.fits' _skyfileo2 = 'direc$standard/ident/ATLAS_O2.fits' atlas_smooto2 = '_atlas_smoot_o2.fits' atlas_smooth2o = '_atlas_smoot_h2o.fits' _sigma = 200 ntt.util.delete(atlas_smooto2) ntt.util.delete(atlas_smooth2o) iraf.imfilter.gauss(_skyfileh2o, output=atlas_smooth2o, sigma=_sigma) iraf.imfilter.gauss(_skyfileo2, output=atlas_smooto2, sigma=_sigma) llskyh2o, ffskyh2o = ntt.util.readspectrum(atlas_smooth2o) llskyo2, ffskyo2 = ntt.util.readspectrum(atlas_smooto2) ffskyo2cut = np.interp(llo2, llskyo2, ffskyo2) ffskyh2ocut = np.interp(llh2o, llskyh2o, ffskyh2o) _scaleh2o = [] integral_h2o = [] for i in range(1, 21): j = 0.6 + i * 0.04 _ffskyh2ocut = list((np.array(ffskyh2ocut) * j) + 1 - j) diff_h2o = abs(_ffskyh2ocut - ffh2o) integraleh2o = np.trapz(diff_h2o, llh2o) integral_h2o.append(integraleh2o) _scaleh2o.append(j) _scaleo2 = [] integral_o2 = [] for i in range(1, 21): j = 0.6 + i * 0.04 _ffskyo2cut = list((np.array(ffskyo2cut) * j) + 1 - j) diff_o2 = abs(_ffskyo2cut - ffo2) integraleo2 = np.trapz(diff_o2, llo2) integral_o2.append(integraleo2) _scaleo2.append(j) sh2o = _scaleh2o[np.argmin(integral_h2o)] so2 = _scaleo2[np.argmin(integral_o2)] telluric_features = ((np.array(ffskyh2o) * sh2o) + 1 - sh2o) + ( (np.array(ffskyo2) * so2) + 1 - so2) - 1 telluric_features = np.array([1] + list(telluric_features) + [1]) llskyo2 = np.array([1000] + list(llskyo2) + [15000]) telluric_features_cut = np.interp(xxstd, llskyo2, telluric_features) _imgout = 'atmo_' + imgstd data1, hdr = pyfits.getdata(imgstd, 0, header=True) data1[0] = np.array(telluric_features_cut) data1[1] = data1[1] / data1[1] data1[2] = data1[2] / data1[2] data1[3] = data1[3] / data1[3] ntt.util.delete(_imgout) pyfits.writeto(_imgout, np.float32(data1), hdr) ntt.util.delete(atlas_smooto2) ntt.util.delete(atlas_smooth2o) 
ntt.util.delete(imgout) else: _imgout = '' print '### telluric correction with model not possible ' return _imgout
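# A minimal usage sketch for telluric_atmo() above (file names hypothetical).
# The function writes 'atmo_<imgstd>', whose first row holds the scaled H2O+O2
# transmission; dividing a science spectrum on the same wavelength grid by that
# row removes the telluric bands.  Only telluric_atmo() comes from this
# pipeline; apply_telluric() is an illustration, not part of it.
import numpy as np
try:
    import pyfits
except ImportError:
    from astropy.io import fits as pyfits

def apply_telluric(imgsci, imgatmo, imgout):
    """Divide the flux row of imgsci by the transmission stored in imgatmo."""
    sci, hdr = pyfits.getdata(imgsci, 0, header=True)
    atmo = pyfits.getdata(imgatmo, 0)
    transmission = np.clip(atmo[0], 1e-3, None)   # guard the deepest absorption cores
    corrected = np.array(sci, dtype=np.float32)
    corrected[0] = sci[0] / transmission
    pyfits.writeto(imgout, corrected, hdr)

# e.g. (hypothetical names):
#   atmofile = telluric_atmo('std_Gr13.fits')
#   apply_telluric('sci_Gr13.fits', atmofile, 'tell_sci_Gr13.fits')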
import os
folderroot = '/Users/lucaizzo/Documents/NOT/test/'
os.chdir(folderroot)
import numpy as np
from astropy.io import fits
from matplotlib import pyplot as plt
import shutil
import sys
from pyraf import iraf

iraf.noao(_doprint=0)
iraf.imred(_doprint=0)
iraf.ccdred(_doprint=0)
iraf.twodspec(_doprint=0)
iraf.longslit(_doprint=0)
iraf.kpnoslit(_doprint=0)
iraf.astutil(_doprint=0)
iraf.onedspec(_doprint=0)
iraf.twodspec.longslit.dispaxis = 2

# read object keywords from the last *.fits file found in the working directory
for fname in os.listdir(os.getcwd()):
    if fname.endswith('.fits'):
        testfile = fname
hduo = fits.open(testfile)

# name targets (science & standard)
target = hduo[0].header['OBJECT']
#target2 = 'SP0644p375'
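# A small, hypothetical extension of the snippet above: rather than keeping only
# the last matching file in 'testfile', group every *.fits frame in the working
# directory by its OBJECT keyword so that science and standard-star exposures
# can be listed separately.  Keyword name and directory layout are assumptions.
from collections import defaultdict

frames_by_object = defaultdict(list)
for fname in sorted(os.listdir(os.getcwd())):
    if fname.endswith('.fits'):
        try:
            objname = fits.getval(fname, 'OBJECT')
        except KeyError:
            objname = 'UNKNOWN'
        frames_by_object[objname].append(fname)

for objname in sorted(frames_by_object):
    print('%s: %d frame(s)' % (objname, len(frames_by_object[objname])))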
def load_modules(): # Define a function to load all of the modules so that they don't' import # unless we need them global iraf from pyraf import iraf iraf.pysalt() iraf.saltspec() iraf.saltred() iraf.set(clobber='YES') global sys import sys global os import os global shutil import shutil global glob from glob import glob global pyfits import pyfits global np import numpy as np global lacosmicx import lacosmicx global interp from scipy import interp global signal from scipy import signal global ndimage from scipy import ndimage global interpolate from scipy import interpolate global WCS from astropy.wcs import WCS global optimize from scipy import optimize global ds9 import ds9 global GaussianProcess from sklearn.gaussian_process import GaussianProcess global pandas import pandas iraf.onedspec() iraf.twodspec() iraf.longslit() iraf.apextract() iraf.imutil()
def load_modules(): # Define a function to load all of the modules so that they don't' import # unless we need them global iraf from pyraf import iraf iraf.pysalt() iraf.saltspec() iraf.saltred() iraf.set(clobber='YES') global sys import sys global os import os global shutil import shutil global glob from glob import glob global pyfits import pyfits global np import numpy as np global lacosmicx import lacosmicx global interp from scipy import interp global signal from scipy import signal global ndimage from scipy import ndimage global interpolate from scipy import interpolate global WCS from astropy.wcs import WCS global optimize from scipy import optimize global ds9 import pyds9 as ds9 global GaussianProcess from sklearn.gaussian_process import GaussianProcess global pandas import pandas iraf.onedspec() iraf.twodspec() iraf.longslit() iraf.apextract() iraf.imutil() iraf.rvsao(motd='no')
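# Usage note for the two load_modules() variants above: they defer the heavy
# imports (pyraf, lacosmicx, sklearn, ...) until a reduction is actually run,
# publishing them as module-level globals.  A typical driver would therefore
# call it once before touching any of those names, e.g. (main() and filelist
# are hypothetical; only load_modules() comes from above):
#
#     def main(filelist):
#         load_modules()            # populates iraf, np, glob, ... as globals
#         for f in glob(filelist):  # now safe to use the lazily imported names
#             ...                   # run the PySALT reduction steps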
def sofispecreduction(files, _interactive, _doflat, listflat, _docross, _verbose=False): # print "LOGX:: Entering `sofispecreduction` method/function in # %(__file__)s" % globals() import ntt from ntt.util import delete, readhdr, readkey3, correctcard, rangedata import string, re, sys, os, glob try: from astropy.io import fits as pyfits except: import pyfits from pyraf import iraf from numpy import argmin, array, min, isnan, arange, mean, sum from numpy import sqrt, pi iraf.noao(_doprint=0) iraf.imred(_doprint=0) iraf.ccdred(_doprint=0) iraf.twodspec(_doprint=0) iraf.longslit(_doprint=0) iraf.specred(_doprint=0) toforget = ['ccdred.flatcombine', 'ccdproc', 'specred.apall', 'longslit.identify', 'longslit.reidentify', 'longslit.fitcoords', 'specred.transform', 'specred.response', 'imutil.hedit'] for t in toforget: iraf.unlearn(t) iraf.longslit.dispaxi = 2 iraf.longslit.mode = 'h' iraf.specred.dispaxi = 2 iraf.specred.mode = 'h' iraf.ccdproc.darkcor = 'no' iraf.ccdproc.fixpix = 'no' iraf.ccdproc.flatcor = 'no' iraf.ccdproc.zerocor = 'no' iraf.ccdproc.overscan = 'no' iraf.ccdproc.ccdtype = '' iraf.ccdred.instrument = "/dev/null" iraf.set(direc=ntt.__path__[0] + '/') if _interactive: _interact = 'yes' else: _interact = 'no' if _verbose: iraf.ccdred.verbose = 'yes' iraf.specred.verbose = 'yes' else: iraf.specred.verbose = 'no' iraf.ccdred.verbose = 'no' import datetime import time now = datetime.datetime.now() datenow = now.strftime('20%y%m%d%H%M') MJDtoday = 55927 + (datetime.date.today() - datetime.date(2012, 01, 01)).days # if they are not sorted the fieldlist dict could crash files = ntt.sofiphotredudef.sortbyJD(files) outputlist = [] setup = [] fieldlist = {} OBID = {} RA = {} DEC = {} objects = {} flats = {} lamps1 = {} _rdnoise = readkey3(readhdr(re.sub('\n', '', files[0])), 'ron') _gain = readkey3(readhdr(re.sub('\n', '', files[0])), 'gain') for img in files: img = re.sub('\n', '', img) hdr = readhdr(img) _object = readkey3(hdr, 'object') _filter = readkey3(hdr, 'filter') _date = readkey3(hdr, 'date-night') _exptime = readkey3(hdr, 'exptime') _grism = readkey3(hdr, 'grism') _obsmode = readkey3(hdr, 'obsmode') _type = '' if _grism.lower() not in ['gr', 'gb']: _type = 'image' if not _type: if _object.lower() == 'flat': _type = 'flat' if _date not in flats: flats[_date] = {} if _grism not in flats[_date]: flats[_date][_grism] = [img] else: flats[_date][_grism].append(img) elif _object.lower() == 'lamp': _lampid = (readkey3(hdr, 'esoid'), readkey3(hdr, 'grism')) if _lampid not in lamps1: lamps1[_lampid] = [None, None] if readkey3(hdr, 'lamp1') == 'Xenon': lamps1[_lampid][0] = img else: lamps1[_lampid][1] = img _type = 'lamp' # if readkey3(hdr,'lamp1')=='Xenon': # _type='lamp' # if _grism not in lamps: # lamps[_grism]=[img] # else: # lamps[_grism].append(img) # else: # _type='notgood' if not _type: _ra = readkey3(hdr, 'RA') _dec = readkey3(hdr, 'DEC') _object_name = readkey3(hdr, 'object') _OBID = (readkey3(hdr, 'esoid'), _grism) if string.count(_object_name, '/') or string.count(_object_name, '.') or string.count(_object_name, ' '): nameobj = string.split(_object_name, '/')[0] nameobj = string.split(nameobj, ' ')[0] nameobj = string.split(nameobj, '.')[0] else: nameobj = _object_name if _grism not in fieldlist: fieldlist[_grism] = {} if _OBID not in OBID: count = 1 nameobj0 = nameobj + '_' + str(count) answ = 'yes' while answ == 'yes': if nameobj0 in fieldlist[_grism]: count = count + 1 nameobj0 = nameobj + '_' + str(count) else: answ = 'no' fieldlist[_grism][nameobj0] = [] 
OBID[readkey3(hdr, 'esoid'), _grism] = nameobj0 fieldlist[_grism][nameobj0].append(img) if _verbose: print img print _type, _object, _filter print 'lamps', lamps1 lamps = {} for _lampid in lamps1: lamp = '' output = 'arc_' + str(_lampid[0]) + '_' + str(_lampid[1]) + '.fits' if lamps1[_lampid][0] and lamps1[_lampid][1]: print lamps1[_lampid][0], lamps1[_lampid][1] # try: ntt.util.delete(output) iraf.imarith(lamps1[_lampid][0], '-', lamps1[_lampid] [1], result=output, verbose='yes') # except: # print 'warning, lamp file not ON/OFF' # os.system('cp '+lamps1[_lampid][0]+' '+output) lamp = output elif lamps1[_lampid][0] and not lamps1[_lampid][1]: os.system('cp ' + lamps1[_lampid][0] + ' ' + output) lamp = output if lamp: if _lampid[1] not in lamps: lamps[_lampid[1]] = [lamp] else: lamps[_lampid[1]].append(lamp) if _verbose: print '\n### FIELDS\n', fieldlist print '\n### OBID\n', OBID print '\n### FLATS\n', flats print '\n### LAMPS\n', lamps # if not flats: # sys.exit('\n### error: spectroscopic flat not available, add flats in the directory and try again') # if not lamps: # sys.exit('\n### error: spectroscopic lamp not available, add lamps in # the directory and try again') if not listflat: print '\n### list of available spectroscopic flats (ON,OFF):' for _date in flats: for _grism in flats[_date]: for img in flats[_date][_grism]: if pyfits.open(img)[0].data.mean() >= 2000: print img, _grism, _date, 'ON ? ' else: print img, _grism, _date, 'OFF ? ' for _date in flats: for _grism in flats[_date]: flat = {'ON': [], 'OFF': []} for img in flats[_date][_grism]: _type = '' if readkey3(hdr, 'lamp3'): print '\n### header lamp3 found: flat ON ', str(img) _type = 'ON' else: if pyfits.open(img)[0].data.mean() >= 2000: _type = 'ON' else: _type = 'OFF' aa, bb, cc = ntt.util.display_image(img, 1, '', '', False) print '\n### number of flat already selected (ON,OFF): \n ### please select same number ' \ 'of ON and OFF flats \n' + \ str(len(flat['ON'])) + ' ' + str(len(flat['OFF'])) print '\n### image ' + str(img) answ = raw_input( 'ON/OFF/REJECT/STOP [' + str(_type) + '] ok (ON[n]/OFF[f]/r/s) [' + _type + '] ? 
') if not answ: answ = _type if answ in ['ON', 'on', 'n']: _type = 'ON' if answ in ['OFF', 'off', 'f']: _type = 'OFF' if answ in ['s', 'S', 'STOP', 'stop', 'Stop']: _type = 'stop' if answ in ['r', 'R', 'reject']: _type = 'r' if _type in ['ON', 'OFF']: flat[_type].append(img) elif _type == 'stop': if len(flat['ON']) == len(flat['OFF']) and len(flat['OFF']) >= 2: break elif len(flat['ON']) == len(flat['OFF']) and len(flat['OFF']) == 0: break else: print '\n### Warning: you can stop only if the numbers of ON and OFF are the same' print len(flat['ON']), len(flat['OFF']) if len(flat['ON']) == len(flat['OFF']) and len(flat['OFF']) >= 2: ff = open('_flatlist', 'w') for ii in range(0, len(flat['OFF'])): delete('flat_' + str(_date) + '_' + str(_grism) + '_' + str(MJDtoday) + '_' + str(ii) + '.fits') iraf.imarith(flat['ON'][ii], '-', flat['OFF'][ii], result='flat_' + str(_date) + '_' + str(_grism) + '_' + str(MJDtoday) + '_' + str( ii) + '.fits', verbose='no') ff.write( 'flat_' + str(_date) + '_' + str(_grism) + '_' + str(MJDtoday) + '_' + str(ii) + '.fits\n') ff.close() masterflat = 'flat_' + \ str(_date) + '_' + str(_grism) + \ '_' + str(MJDtoday) + '.fits' delete(masterflat) _order = '80' iraf.ccdred.flatcombine(input='@_flatlist', output=masterflat, combine='median', rdnoise=_rdnoise, gain=_gain, ccdtype='') hdr = readhdr(masterflat) matching = [s for s in hdr.keys() if "IMCMB" in s] for imcmb in matching: aaa = iraf.hedit(masterflat, imcmb, delete='yes', update='yes', verify='no', Stdout=1) delete('_flatlist') print masterflat correctcard(masterflat) if masterflat not in outputlist: outputlist.append(masterflat) ntt.util.updateheader(masterflat, 0, {'FILETYPE': [41102, 'flat field'], 'SINGLEXP': [False, 'TRUE if resulting from single exposure'], 'M_EPOCH': [False, 'TRUE if resulting from multiple epochs']}) print '\n### master flat ........... done ' delete('n' + masterflat) iraf.specred.response(masterflat, normaliz=masterflat + '[100:900,*]', response='n' + masterflat, interac=_interact, thresho='INDEF', sample='*', naverage=2, function='spline3', low_rej=3, high_rej=3, order=_order, niterat=20, grow=0, graphic='stdgraph', mode='q') listflat.append('n' + masterflat) if 'n' + masterflat not in outputlist: outputlist.append('n' + masterflat) ntt.util.updateheader('n' + masterflat, 0, {'FILETYPE': [41203, 'normalized flat field'], 'TRACE1': [masterflat, 'Originating file']}) # ntt.util.updateheader('n'+masterflat,0,{'TRACE1':[masterflat,'']}) flattot = flat['ON'] + flat['OFF'] num = 0 for img in flattot: num = num + 1 ntt.util.updateheader(masterflat, 0, { 'PROV' + str(num): [readkey3(readhdr(img), 'ARCFILE'), 'Originating file'], 'TRACE' + str(num): [readkey3(readhdr(img), 'ARCFILE'), 'Originating file']}) ntt.util.updateheader('n' + masterflat, 0, { 'PROV' + str(num): [readkey3(readhdr(img), 'ARCFILE'), 'Originating file']}) if listflat: print '\n### flat available:\n### ' + str(listflat), '\n' elif len(flat['ON']) == len(flat['OFF']) and len(flat['OFF']) == 0: print '\n### no good flats in this set ......' 
else: sys.exit('\n### Error: number of ON and OFF not the same') for _grism in fieldlist: obj0 = fieldlist[_grism][fieldlist[_grism].keys()[0]][0] # ############# arc ######################### if _grism not in lamps: print '\n### take arc from archive ' arcfile = ntt.util.searcharc(obj0, '')[0] if arcfile[0] == '/': os.system('cp ' + arcfile + ' ' + string.split(arcfile, '/')[-1]) arcfile = string.split(arcfile, '/')[-1] lamps[_grism] = [arcfile] if _grism in lamps: arclist = lamps[_grism] if arclist: arcfile = ntt.util.searcharc(obj0, arclist)[0] else: arcfile = ntt.util.searcharc(obj0, '')[0] print arcfile if arcfile: print arcfile datea = readkey3(readhdr(arcfile), 'date-night') if arcfile[0] == '/': os.system('cp ' + arcfile + ' ' + string.split(arcfile, '/')[-1]) arcfile = string.split(arcfile, '/')[-1] if _doflat: if listflat: flat0 = ntt.util.searchflat(arcfile, listflat)[0] else: flat0 = '' else: flat0 = '' if flat0: _flatcor = 'yes' else: _flatcor = 'no' _doflat = False ntt.util.delete('arc_' + datea + '_' + _grism + '_' + str(MJDtoday) + '.fits') print arcfile, flat0, _flatcor, _doflat if _doflat: iraf.noao.imred.ccdred.ccdproc(arcfile, output='arc_' + datea + '_' + _grism + '_' + str(MJDtoday) + '.fits', overscan='no', trim='no', zerocor='no', flatcor=_flatcor, flat=flat0) else: os.system('cp ' + arcfile + ' ' + 'arc_' + datea + '_' + _grism + '_' + str(MJDtoday) + '.fits') iraf.noao.imred.ccdred.ccdproc('arc_' + datea + '_' + _grism + '_' + str(MJDtoday) + '.fits', output='', overscan='no', trim='yes', zerocor='no', flatcor='no', flat='', trimsec='[30:1000,1:1024]') arcfile = 'arc_' + datea + '_' + \ _grism + '_' + str(MJDtoday) + '.fits' ntt.util.correctcard(arcfile) print arcfile if arcfile not in outputlist: outputlist.append(arcfile) ntt.util.updateheader(arcfile, 0, {'FILETYPE': [41104, 'pre-reduced 2D arc'], 'SINGLEXP': [True, 'TRUE if resulting from single exposure'], 'M_EPOCH': [False, 'TRUE if resulting from multiple epochs'], 'PROV1': [readkey3(readhdr(arcfile), 'ARCFILE'), 'Originating file'], 'TRACE1': [readkey3(readhdr(arcfile), 'ARCFILE'), 'Originating file']}) arcref = ntt.util.searcharc(obj0, '')[0] if not arcref: identific = iraf.longslit.identify(images=arcfile, section='column 10', coordli='direc$standard/ident/Lines_XeAr_SOFI.dat', nsum=10, fwidth=7, order=3, mode='h', Stdout=1, verbose='yes') else: print arcref os.system('cp ' + arcref + ' .') arcref = string.split(arcref, '/')[-1] if not os.path.isdir('database/'): os.mkdir('database/') if os.path.isfile(ntt.util.searcharc(obj0, '')[1] + '/database/id' + re.sub('.fits', '', arcref)): os.system('cp ' + ntt.util.searcharc(obj0, '')[1] + '/database/id' + re.sub('.fits', '', arcref) + ' database/') print arcref, arcfile # time.sleep(5) # os.system('rm -rf database/idarc_20130417_GR_56975') # raw_input('ddd') identific = iraf.longslit.reidentify(referenc=arcref, images=arcfile, interac='NO', # _interact, section='column 10', shift=0.0, coordli='direc$standard/ident/Lines_XeAr_SOFI.dat', overrid='yes', step=0, newaps='no', nsum=5, nlost=2, mode='h', verbose='yes', Stdout=1) # print identific # raw_input('ddd') identific = iraf.longslit.reidentify(referenc=arcref, images=arcfile, interac=_interact, section='column 10', shift=1.0, coordli='direc$standard/ident/Lines_XeAr_SOFI.dat', overrid='yes', step=0, newaps='no', nsum=5, nlost=2, mode='h', verbose='yes', Stdout=1) # fitsfile = ntt.efoscspec2Ddef.continumsub('new3.fits', 6, 1) # I need to run twice I don't know why # print identific # raw_input('ddd') if 
_interactive: answ = raw_input( '\n### do you like the identification [[y]/n]') if not answ: answ = 'y' else: answ = 'y' if answ in ['n', 'N', 'no', 'NO', 'No']: yy1 = pyfits.open(arcref)[0].data[:, 10:20].mean(1) xx1 = arange(len(yy1)) yy2 = pyfits.open(arcfile)[0].data[:, 10:20].mean(1) xx2 = arange(len(yy2)) ntt.util.delete('_new3.fits') hdu = pyfits.PrimaryHDU(yy1) hdulist = pyfits.HDUList([hdu]) hdulist.writeto('_new3.fits') fitsfile = ntt.efoscspec2Ddef.continumsub('_new3.fits', 4, 1) yy1 = pyfits.open(fitsfile)[0].data ntt.util.delete('_new3.fits') hdu = pyfits.PrimaryHDU(yy2) hdulist = pyfits.HDUList([hdu]) hdulist.writeto('_new3.fits') fitsfile = ntt.efoscspec2Ddef.continumsub('_new3.fits', 4, 1) yy2 = pyfits.open(fitsfile)[0].data _shift = ntt.efoscspec2Ddef.checkwavelength_arc( xx1, yy1, xx2, yy2, '', '') * (-1) print arcref, arcfile, _shift identific = iraf.longslit.reidentify(referenc=arcref, images=arcfile, interac='YES', section='column 10', shift=_shift, coordli='direc$standard/ident/Lines_XeAr_SOFI.dat', overrid='yes', step=0, newaps='no', nsum=5, nlost=2, mode='h', verbose='yes', Stdout=1) answ = raw_input('\n### is it ok now ? [[y]/n] ') if not answ: answ = 'y' if answ in ['n', 'N', 'no', 'NO', 'No']: sys.exit( '\n### Warning: line identification with some problems') iraf.longslit.reidentify(referenc=arcfile, images=arcfile, interac='NO', section='column 10', coordli='direc$standard/ident/Lines_XeAr_SOFI.dat', overrid='yes', step=10, newaps='yes', nsum=5, nlost=2, mode='h', verbose='no') iraf.longslit.fitcoords(images=re.sub('.fits', '', arcfile), fitname=re.sub('.fits', '', arcfile), interac='no', combine='yes', databas='database', function='legendre', yorder=4, logfile='', plotfil='', mode='h') if identific: _rms = float(identific[-1].split()[-1]) _num = float(identific[-1].split()[2].split('/')[0]) hdr = ntt.util.readhdr(arcfile) hedvec = {'LAMRMS': [_rms * .1, 'residual RMS [nm]'], 'LAMNLIN': [_num, 'Nb of arc lines used in the fit of the wavel. solution'], 'SPEC_ERR': [(_rms * .1) / sqrt(float(_num)), 'statistical uncertainty'], 'SPEC_SYE': [0.1, 'systematic error']} ntt.util.updateheader(arcfile, 0, hedvec) else: sys.exit('Warning: arcfile not found') else: print 'here' # ######################################################################################################## for field in fieldlist[_grism]: listaobj = fieldlist[_grism][field] listaobj = ntt.sofiphotredudef.sortbyJD(listaobj) listatemp = listaobj[:] # ############## flat ###################### if listflat and _doflat: flat0 = ntt.util.searchflat(listaobj[0], listflat)[0] else: flat0 = '' if flat0: _flatcor = 'yes' else: _flatcor = 'no' ########## crosstalk ########################### listatemp2 = [] _date = readkey3(readhdr(listatemp[0]), 'date-night') for img in listatemp: # num2=listatemp.index(listasub[j]) imgout = field + '_' + str(_date) + '_' + str(_grism) + '_' + str(MJDtoday) + '_' + str( listatemp.index(img)) + '.fits' print '\n### input image: ' + str(img) delete(imgout) listatemp2.append(imgout) if _docross: print '### correct for cross talk ..... done' ntt.sofiphotredudef.crosstalk(img, imgout) correctcard(imgout) ntt.util.updateheader( imgout, 0, {'CROSSTAL': ['True', '']}) else: os.system('cp ' + img + ' ' + imgout) correctcard(imgout) if _flatcor == 'yes': print '### correct for flat field ..... 
done' try: iraf.noao.imred.ccdred.ccdproc(imgout, output='', overscan='no', trim='no', zerocor='no', flatcor=_flatcor, flat=flat0) except: iraf.imutil.imreplace( images=flat0, value=0.01, lower='INDEF', upper=0.01, radius=0) iraf.noao.imred.ccdred.ccdproc(imgout, output='', overscan='no', trim='no', zerocor='no', flatcor=_flatcor, flat=flat0) iraf.noao.imred.ccdred.ccdproc(imgout, output='', overscan='no', trim='yes', zerocor='no', flatcor='no', flat='', trimsec='[30:1000,1:1024]') ntt.util.updateheader( imgout, 0, {'FLATCOR': [flat0, 'flat correction']}) if imgout not in outputlist: outputlist.append(imgout) ntt.util.updateheader(imgout, 0, {'FILETYPE': [42104, 'pre-reduced frame'], 'SINGLEXP': [True, 'TRUE if resulting from single exposure'], 'M_EPOCH': [False, 'TRUE if resulting from multiple epochs'], 'PROV1': [readkey3(readhdr(imgout), 'ARCFILE'), 'Originating file'], 'TRACE1': [readkey3(readhdr(imgout), 'ARCFILE'), 'Originating file']}) print '### output image: ' + str(imgout) listatemp = listatemp2[:] ######### differences object images ##################### listasub = ntt.sofispec2Ddef.findsubimage(listatemp) reduced = [] print '\n### Select Frames to be subtracted (eg A-B, B-A, C-D, D-C, ....) ' print '### frame1 \t frame2 \t offset1 \t offset2 \t JD1 \t JD2\n' if len(listatemp) >= 2 and len(listasub) >= 2: for j in range(0, len(listatemp)): print '### ', listatemp[j], listasub[j], str(readkey3(readhdr(listatemp[j]), 'xcum')), str( readkey3(readhdr(listasub[j]), 'xcum')), \ str(readkey3(readhdr(listatemp[j]), 'JD')), str( readkey3(readhdr(listatemp[j]), 'JD')) if _interactive: answ = raw_input('\n### ok [[y]/n] ? ') if not answ: answ = 'y' else: answ = 'y' num1 = j image1 = listatemp[j] _date = readkey3(readhdr(image1), 'date-night') if answ == 'y': num2 = listatemp.index(listasub[j]) image2 = listasub[j] else: image2 = raw_input( 'which image do you want to subtract') num2 = listatemp.index(image2) imgoutsub = field + '_' + str(_date) + '_' + str(_grism) + '_' + str(MJDtoday) + '_' + str( num1) + '_' + str(num2) + '.fits' delete(imgoutsub) iraf.images.imutil.imarith( operand1=image1, op='-', operand2=image2, result=imgoutsub, verbose='no') ntt.util.updateheader(imgoutsub, 0, {'skysub': [image2, 'sky image subtracted'], 'FILETYPE': [42115, 'pre-reduced frame sky subtracted'], 'TRACE1': [image1, 'Originating file'], 'PROV2': [readkey3(readhdr(image2), 'ARCFILE'), 'Originating file'], 'TRACE2': [image2, 'Originating file']}) reduced.append(imgoutsub) if imgoutsub not in outputlist: outputlist.append(imgoutsub) ######################## 2D wavelengh calibration ######## for img in reduced: if arcfile: hdra = ntt.util.readhdr(arcfile) delete('t' + img) iraf.specred.transform(input=img, output='t' + img, minput='', fitnames=re.sub('.fits', '', arcfile), databas='database', x1='INDEF', x2='INDEF', y1='INDEF', y2='INDEF', flux='yes', mode='h', logfile='logfile') ntt.util.updateheader('t' + img, 0, {'ARC': [arcfile, ''], 'FILETYPE': [42106, 'wavelength calibrate 2D frames'], 'TRACE1': [img, 'Originating file']}) ntt.util.updateheader( 't' + img, 0, {'TRACE1': [img, 'Originating file']}) ntt.util.updateheader('t' + img, 0, {'LAMRMS': [ntt.util.readkey3(hdra, 'LAMRMS'), 'residual RMS [nm]'], 'LAMNLIN': [ntt.util.readkey3(hdra, 'LAMNLIN'), 'number of arc lines'], 'SPEC_ERR': [ntt.util.readkey3(hdra, 'SPEC_ERR'), 'statistical uncertainty'], 'SPEC_SYE': [ntt.util.readkey3(hdra, 'SPEC_SYE'), 'systematic error']}) ########################### delete('t' + arcfile) 
iraf.specred.transform(input=arcfile, output='t' + arcfile, minput='', fitnames=re.sub('.fits', '', arcfile), databas='database', x1='INDEF', x2='INDEF', y1='INDEF', y2='INDEF', flux='yes', mode='h', logfile='logfile') specred = ntt.util.spectraresolution2(arcfile, 50) if specred: ntt.util.updateheader( 't' + img, 0, {'SPEC_RES': [specred, 'Spectral resolving power']}) delete('t' + arcfile) ########################### iraf.hedit('t' + img, 'TRACE2', delete='yes', update='yes', verify='no', Stdout=1) if 't' + img not in outputlist: outputlist.append('t' + img) print '\n### 2D frame t' + str(img) + ' wavelengh calibrated ............ done' _skyfile = ntt.__path__[ 0] + '/standard/ident/sky_' + _grism + '.fits' # check in wavelengh ######### hdr = ntt.util.readhdr(img) if glob.glob(_skyfile) and readkey3(hdr, 'exptime') > 20.: _original = readkey3(hdr, 'ORIGFILE') _archive = readkey3(hdr, 'ARCFILE') if os.path.isfile(_archive): imgstart = _archive elif os.path.isfile(_original): imgstart = _original else: imgstart = '' if imgstart: delete('_tmp.fits') print imgstart, arcfile iraf.specred.transform(input=imgstart, output='_tmp.fits', minput='', fitnames=re.sub('.fits', '', arcfile), databas='database', x1='INDEF', x2='INDEF', y1='INDEF', y2='INDEF', flux='yes', mode='h', logfile='logfile') shift = ntt.sofispec2Ddef.skysofifrom2d('_tmp.fits', _skyfile) zro = pyfits.open('_tmp.fits')[0].header.get('CRVAL2') delete('_tmp.fits') if _interactive: answ = raw_input( 'do you want to correct the wavelengh calibration with this shift: ' + str( shift) + ' [[y]/n] ? ') if not answ: answ = 'y' else: answ = 'y' if answ.lower() in ['y', 'yes']: ntt.util.updateheader('t' + img, 0, {'CRVAL2': [zro + int(shift), ''], 'shift': [float(shift), '']}) # ntt.util.updateheader('t'+img,0,{'shift':[float(shift),'']}) print '\n### check wavelengh calibration with sky lines ..... done' try: hdrt = ntt.util.readhdr('t' + img) wavelmin = float(readkey3(hdrt, 'CRVAL2')) + (0.5 - float(readkey3(hdrt, 'CRPIX2'))) * float( readkey3(hdrt, 'CDELT2')) wavelmax = float(readkey3(hdrt, 'CRVAL2')) + ( (float(readkey3(hdrt, 'NAXIS2')) + 0.5 - float(readkey3(hdrt, 'CRPIX2'))) * float( readkey3(hdrt, 'CDELT2'))) hedvec = {} hedvec['WAVELMIN'] = [ wavelmin * .1, '[nm] minimum wavelength'] hedvec['WAVELMAX'] = [ wavelmax * .1, ' [nm] maximum wavelength'] hedvec['XMIN'] = [wavelmin, '[A] minimum wavelength'] hedvec['XMAX'] = [wavelmax, '[A] maximum wavelength'] hedvec['SPEC_BW'] = [ (wavelmax * .1) - (wavelmin * .1), '[nm] Bandpass Width Wmax - Wmin'] hedvec['SPEC_VAL'] = [ ((wavelmax * .1) + (wavelmin * .1)) / 2., '[nm] Mean Wavelength'] hedvec['SPEC_BIN'] = [ ((wavelmax * .1) - (wavelmin * .1)) / (float(readkey3(hdr, 'NAXIS2')) - 1), 'Wavelength bin size [nm/pix]'] hedvec['VOCLASS'] = ['SPECTRUM V1.0', 'VO Data Model'] hedvec['VOPUB'] = ['ESO/SAF', 'VO Publishing Authority'] # hedvec['APERTURE']=[float(re.sub('slit','',readkey3(hdrt,'slit'))),'aperture width'] ntt.util.updateheader('t' + img, 0, hedvec) except: pass else: print '\n### Warning: arc not found for the image ' + str(img) + ' with setup ' + str(_grism) reduceddata = rangedata(outputlist) print '\n### adding keywords for phase 3 ....... 
' f = open('logfile_spec2d_' + str(reduceddata) + '_' + str(datenow) + '.raw.list', 'w') for img in outputlist: if img[-4:] == 'fits': hdr = readhdr(img) # ############################################### # cancel pc matrix if 'PC1_1' in hdr.keys(): aaa = iraf.hedit(img, 'PC1_1', delete='yes', update='yes', verify='no', Stdout=1) if 'PC2_2' in hdr.keys(): aaa = iraf.hedit(img, 'PC2_2', delete='yes', update='yes', verify='no', Stdout=1) if 'PC1_2' in hdr.keys(): aaa = iraf.hedit(img, 'PC1_2', delete='yes', update='yes', verify='no', Stdout=1) if 'PC2_1' in hdr.keys(): aaa = iraf.hedit(img, 'PC2_1', delete='yes', update='yes', verify='no', Stdout=1) ################# # added for DR2 print img if 'NCOMBINE' in hdr: _ncomb = readkey3(hdr, 'NCOMBINE') else: _ncomb = 1.0 ntt.util.updateheader( img, 0, {'DETRON ': [12, 'Readout noise per output (e-)']}) ntt.util.updateheader(img, 0, {'EFFRON': [12. * (1 / sqrt(readkey3(hdr, 'ndit') * _ncomb)) * sqrt(pi / 2), 'Effective readout noise per output (e-)']}) ntt.util.phase3header(img) # phase 3 definitions ############################ # change for DR2 ############################ texp = float(readkey3(hdr, 'dit')) * float(readkey3(hdr, 'ndit')) mjdend = float(readkey3(hdr, 'MJD-OBS')) + (float(readkey3(hdr, 'ndit')) * ( float(readkey3(hdr, 'dit')) + 1.8)) / (60. * 60. * 24.) strtexp = time.strftime('%H:%M:%S', time.gmtime(texp)) _telapse = (mjdend - float(readkey3(hdr, 'MJD-OBS'))) * \ 60. * 60 * 24. # tmid=_telapse/2. tmid = (mjdend + float(readkey3(hdr, 'MJD-OBS'))) / 2 ntt.util.updateheader(img, 0, {'quality': ['Final', 'fast or rapid reduction'], 'BUNIT': ['ADU', 'Physical unit of array values'], 'DIT': [readkey3(hdr, 'dit'), 'Detector Integration Time'], 'NDIT': [readkey3(hdr, 'ndit'), 'Number of sub-integrations'], 'TEXPTIME': [texp, 'Total integration time of all exposures (s)'], 'EXPTIME': [texp, 'Total integration time. ' + strtexp], 'MJD-END': [mjdend, 'End of observations (days)'], 'TELAPSE': [_telapse, 'Total elapsed time [days]'], 'TMID': [tmid, '[d] MJD mid exposure'], 'TITLE': [readkey3(hdr, 'object'), 'Dataset title'], #'TITLE':[str(tmid)[0:9]+' '+str(readkey3(hdr,'object'))+' '+str(readkey3(hdr,'grism'))+' '+\ # str(readkey3(hdr,'filter'))+' # '+str(readkey3(hdr,'slit')),'Dataset # title'],\ 'EXT_OBJ': [False, 'TRUE if extended'], 'CONTNORM': [False, 'spectrum normalized to the continuum'], 'TOT_FLUX': [False, 'TRUE if phot cond and all src flux is captured'], 'SPECSYS': ['TOPOCENT', 'Reference frame for spectral coordinate'], 'FLUXCAL': ['ABSOLUTE', 'type of flux calibration'], 'FLUXERR': [34.7, 'Fractional uncertainty of the flux [%]'], 'DISPELEM': ['Gr#' + re.sub('Gr', '', readkey3(hdr, 'grism')), 'Dispersive element name']}) if readkey3(hdr, 'tech'): ntt.util.updateheader( img, 0, {'PRODCATG': ['SCIENCE.IMAGE', 'Data product category']}) aaa = str(readkey3(hdr, 'arcfiles')) + '\n' f.write(aaa) try: ntt.util.airmass(img) # phase 3 definitions except: print '\n### airmass not computed for image: ', img else: print img + ' is not a fits image' f.close() return outputlist, 'logfile_spec2d_' + str(reduceddata) + '_' + str(datenow) + '.raw.list'
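# The wavelength-range keywords written above (WAVELMIN, WAVELMAX, SPEC_BW,
# SPEC_BIN) follow from the linear dispersion solution along axis 2,
#     lambda(pix) = CRVAL2 + (pix - CRPIX2) * CDELT2,
# evaluated at pix = 0.5 and pix = NAXIS2 + 0.5 and converted from Angstrom to
# nm.  A minimal stand-alone sketch (file name hypothetical):
try:
    import pyfits
except ImportError:
    from astropy.io import fits as pyfits

def wavelength_range(image):
    """Return (wavelmin, wavelmax, bin size) in nm for a lambda-calibrated 2D frame."""
    hdr = pyfits.getheader(image)
    crval2, crpix2 = float(hdr['CRVAL2']), float(hdr['CRPIX2'])
    cdelt2, naxis2 = float(hdr['CDELT2']), float(hdr['NAXIS2'])
    wmin = crval2 + (0.5 - crpix2) * cdelt2             # Angstrom
    wmax = crval2 + (naxis2 + 0.5 - crpix2) * cdelt2    # Angstrom
    binsize = (wmax - wmin) / (naxis2 - 1.0)            # Angstrom per pixel
    return wmin * 0.1, wmax * 0.1, binsize * 0.1

# e.g. wavelength_range('tSN2012xx_20120101_GR_56000_0_1.fits')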
from scipy.optimize import leastsq, fsolve  # ,fmin_slsqp
from scipy.integrate import quad
from scipy.stats import *
from datetime import datetime, date, time
import matplotlib.pyplot as plt
from datetime import timedelta
from numpy.random import randint
import pyfits
from numpy import array      # needed for the elines array below
from pyraf import iraf       # needed for the package loads below

iraf.tables()
iraf.noao()
iraf.imred()
iraf.twodspec()
iraf.onedspec()
iraf.ccdred()
iraf.apextract()
iraf.longslit()
iraf.plot()
iraf.stsdas()
iraf.nebular()
from time import strftime

#############################################################
######## Setup a list of lines and fitting regions
#############################################################
# the list of lines (emission-line rest wavelengths, in Angstrom)
linenames = [3727, 4102, 4340, 4363, 4686, 4861, 4959, 5007,
             5755, 5876, 6548, 6562, 6584, 6717, 6731]
elines = array([.5 * (3727.092 + 3729.875), 4102.89, 4341.68, 4364.436, 4686,
                4862.68, 4960.295, 5008.240, 5754.59, 5876,
                6549.86, 6564.61, 6585.27, 6718.29, 6732.67])
# define a preferred background region width
bgsize = 20
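# Illustrative only: one way the line list and bgsize above can be turned into
# per-line fitting and background windows.  The +/-15 A fit half-width is an
# assumption, not taken from this script.
fitwidth = 15.0                      # half-width of the fitting window [A]
windows = {}
for name, wl in zip(linenames, elines):
    windows[name] = {'fit': (wl - fitwidth, wl + fitwidth),
                     'bg_blue': (wl - fitwidth - bgsize, wl - fitwidth),
                     'bg_red': (wl + fitwidth, wl + fitwidth + bgsize)}
# e.g. windows[5007]['fit'] -> (4993.24, 5023.24)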
def identify_edge(infile, overwrite=False): print('\n#############################') print('Identifying the edges.') binfct1 = fits.getval(infile, 'BIN-FCT1') coordlist = fi.filibdir + 'edge' + str(binfct1) + '.dat' section = 'middle line' verbose = 'yes' nsum = 50 match = -10. fwidth = 6. / binfct1 cradius = 20. / binfct1 threshold = 0. function = 'chebyshev' order = 2 niter = 0 autowrite = 'yes' newaps = 'yes' override = 'yes' refit = 'no' trace = 'yes' step = 50 shift = 0 nlost = 0 minsep = 60. / binfct1 addfeatures = 'no' database = 'database' logfile = 'identify_edge.log' # Not to display items in IRAF packages sys.stdout = open('/dev/null', 'w') iraf.noao() iraf.twodspec() iraf.longslit() sys.stdout = sys.__stdout__ # Back to the stadard output # entering the channel image directory. # os.chdir() does not change the directory for pyraf only in this function. print('\t Entering the channel image directory, \"' + fi.chimagedir + '\".') iraf.cd(fi.chimagedir) basename = fits.getval('../' + infile, 'FRAMEID') idfile = database + '/id' + basename + '.ch01edge' if os.path.isfile(idfile) and not overwrite: print('\t Edge identification files already exist, ' + idfile \ + '. Skipping.') else: if os.path.isfile(idfile) and overwrite: print('\t Removing ' + idfile) try: os.remove(idfile) except: pass print('\t Identifying: ' + basename + '.ch01edge.fits') iraf.identify(basename + '.ch01edge', section=section, database=database, coordlist=coordlist, units='', nsum=nsum, match=match, maxfeat=2, ftype='emission', fwidth=fwidth, cradius=cradius, threshold=threshold, function=function, order=order, sample='*', niter=niter, autowrite=autowrite) print('\t Reidentifying: ' + basename + '.ch01edge.fits') iraf.reidentify(basename + '.ch01edge', basename + '.ch01edge', interac='no', section=section, newaps=newaps, override=override, refit=refit, trace=trace, step=step, nsum=nsum, shift=shift, nlost=nlost, cradius=cradius, threshold=threshold, addfeatures=addfeatures, coordlist=coordlist, match=match, maxfeat=2, minsep=minsep, database=database, logfile=logfile, plotfile='', verbose=verbose, cursor='') for i in range(2, 25): print('\t Reidentifying: ' + basename + '.ch%02dedge.fits' % i) idfile = database + '/id' + basename + '.ch%02dedge' % i if os.path.isfile(idfile) and not overwrite: print('\t Edge identification files already exist, ' + idfile + '. 
Skipping.') else: if os.path.isfile(idfile) and overwrite: print('\t Removing ' + idfile) try: os.remove(idfile) except: pass # treatment for VPH650 if i == 12: disperser = fits.getval(basename + '.ch12edge.fits', 'DISPERSR') if disperser == 'SCFCGRHD65': nlost = 1 if i == 13: disperser = fits.getval(basename + '.ch12edge.fits', 'DISPERSR') if disperser == 'SCFCGRHD65': nlost = 0 iraf.reidentify(basename+'.ch%02dedge'%(i-1), \ basename+'.ch%02dedge'%i, \ interac='no', section=section, newaps=newaps, \ override=override, refit=refit, trace=trace, \ step=0.0, nsum=nsum, shift=shift, nlost=nlost, \ cradius=cradius, threshold=threshold, \ addfeatures=addfeatures, coordlist=coordlist, \ match=match, maxfeat=2, minsep=minsep, \ database=database, logfile=logfile, \ plotfile='', verbose=verbose, cursor='') #Check the result iraf.identify(basename+'.ch%02dedge'%i, section=section, \ database=database, coordlist=coordlist, units='', \ nsum=nsum, match=match, maxfeat=2,ftype='emission', \ fwidth=fwidth, cradius=cradius, threshold=threshold, \ function=function, order=order, sample='*', \ niter=niter, autowrite=autowrite) iraf.reidentify(basename+'.ch%02dedge'%i, \ basename+'.ch%02dedge'%i, \ interac='no', section=section, newaps=newaps, \ override=override, refit=refit, trace=trace, \ step=step, nsum=nsum, shift=shift, nlost=nlost, \ cradius=cradius, threshold=threshold, \ addfeatures=addfeatures, coordlist=coordlist, \ match=match, maxfeat=2, minsep=minsep, \ database=database, logfile=logfile, \ plotfile='', verbose=verbose, cursor='') print('\t Go back to the original directory.') iraf.cd('..') disperser = fits.getval(fi.chimagedir + basename + '.ch12edge.fits', 'DISPERSR') if disperser == 'SCFCGRHD65': correct_ch12_edge(basename, overwrite=overwrite) return
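# The identify/reidentify widths used in identify_edge() above are expressed in
# unbinned pixels and divided by BIN-FCT1, so the edge search stays fixed on the
# detector whatever the binning.  For a hypothetical 2x binning along axis 1:
binfct1_example = 2
print('fwidth  = %.1f pix' % (6. / binfct1_example))    # edge feature width
print('cradius = %.1f pix' % (20. / binfct1_example))   # centering radius
print('minsep  = %.1f pix' % (60. / binfct1_example))   # minimum edge separation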
def reduce(imglist,files_arc, _cosmic, _interactive_extraction,_arc): import string import os import re import sys os.environ["PYRAF_BETA_STATUS"] = "1" try: from astropy.io import fits as pyfits except: import pyfits import numpy as np import util import instruments import combine_sides as cs import cosmics from pyraf import iraf dv = util.dvex() scal = np.pi / 180. if not _interactive_extraction: _interactive = False else: _interactive = True if not _arc: _arc_identify = False else: _arc_identify = True iraf.noao(_doprint=0) iraf.imred(_doprint=0) iraf.ccdred(_doprint=0) iraf.twodspec(_doprint=0) iraf.longslit(_doprint=0) iraf.onedspec(_doprint=0) iraf.specred(_doprint=0) iraf.disp(inlist='1', reference='1') toforget = ['ccdproc', 'imcopy', 'specred.apall', 'longslit.identify', 'longslit.reidentify', 'specred.standard', 'longslit.fitcoords', 'onedspec.wspectext'] for t in toforget: iraf.unlearn(t) iraf.ccdred.verbose = 'no' iraf.specred.verbose = 'no' iraf.ccdproc.darkcor = 'no' iraf.ccdproc.fixpix = 'no' iraf.ccdproc.flatcor = 'no' iraf.ccdproc.zerocor = 'no' iraf.ccdproc.ccdtype = '' iraf.longslit.mode = 'h' iraf.specred.mode = 'h' iraf.noao.mode = 'h' iraf.ccdred.instrument = "ccddb$kpno/camera.dat" list_arc_b = [] list_arc_r = [] for arcs in files_arc: hdr = util.readhdr(arcs) if util.readkey3(hdr, 'VERSION') == 'kastb': list_arc_b.append(arcs) elif util.readkey3(hdr, 'VERSION') == 'kastr': list_arc_r.append(arcs) else: print util.readkey3(hdr, 'VERSION') + 'not in database' sys.exit() asci_files = [] newlist = [[],[]] print '\n### images to reduce :',imglist #raise TypeError for img in imglist: if 'b' in img: newlist[0].append(img) elif 'r' in img: newlist[1].append(img) if len(newlist[1]) < 1: newlist = newlist[:-1] for imgs in newlist: hdr = util.readhdr(imgs[0]) if util.readkey3(hdr, 'VERSION') == 'kastb': inst = instruments.kast_blue elif util.readkey3(hdr, 'VERSION') == 'kastr': inst = instruments.kast_red else: print util.readkey3(hdr, 'VERSION') + 'not in database' sys.exit() iraf.specred.dispaxi = inst.get('dispaxis') iraf.longslit.dispaxi = inst.get('dispaxis') _gain = inst.get('gain') _ron = inst.get('read_noise') iraf.specred.apall.readnoi = _ron iraf.specred.apall.gain = _gain _object0 = util.readkey3(hdr, 'OBJECT') _date0 = util.readkey3(hdr, 'DATE-OBS') _biassec0 = inst.get('biassec') _trimsec0 = inst.get('trimsec') _object0 = re.sub(' ', '', _object0) _object0 = re.sub('/', '_', _object0) nameout0 = str(_object0) + '_' + inst.get('name') + '_' + str(_date0) nameout0 = util.name_duplicate(imgs[0], nameout0, '') timg = nameout0 print '\n### now processing :',timg,' for -> ',inst.get('name') if len(imgs) > 1: img_str = '' for i in imgs: img_str = img_str + i + ',' iraf.imcombine(img_str, output=timg) else: img = imgs[0] if os.path.isfile(timg): os.system('rm -rf ' + timg) iraf.imcopy(img, output=timg) zero_file = inst.get('archive_zero_file') os.system('cp ' + zero_file + ' .') zero_file = string.split(zero_file, '/')[-1] flat_file = inst.get('archive_flat_file') os.system('cp ' + flat_file + ' .') flat_file = string.split(flat_file, '/')[-1] iraf.ccdproc(timg, output='', overscan='yes', trim='yes', zerocor="no", flatcor="no", readaxi='line', trimsec=str(_trimsec0),biassec=str(_biassec0), Stdout=1) iraf.ccdproc(timg, output='', overscan='no', trim='no', zerocor="yes", flatcor="no", readaxi='line', zero=zero_file,order=3, Stdout=1) iraf.ccdproc(timg, output='', overscan='no', trim='no', zerocor="no", flatcor="yes", readaxi='line', flat=flat_file, Stdout=1) img = timg 
#raw_input("Press Enter to continue...") print '\n### starting cosmic removal' if _cosmic: array, header = cosmics.fromfits(img) c = cosmics.cosmicsimage(array, gain=inst.get('gain'), readnoise=inst.get('read_noise'), sigclip = 4.5, sigfrac = 0.5, objlim = 1.0) c.run(maxiter = 4) cosmics.tofits('cosmic_' + img, c.cleanarray, header) print '\n### cosmic removal finished' img='cosmic_' + img if inst.get('name') == 'kast_blue': arcfile = list_arc_b[0] elif inst.get('name') == 'kast_red': arcfile = list_arc_r[0] if not arcfile.endswith(".fits"): arcfile=arcfile+'.fits' if os.path.isfile(arcfile): util.delete('t' + arcfile) iraf.ccdproc(arcfile, output= 't' + arcfile, overscan='yes', trim='yes', zerocor="no", flatcor="no", readaxi='line', trimsec=str(_trimsec0), biassec=str(_biassec0), Stdout=1) arcfile = 't' + arcfile else: print '\n### warning no arcfile \n exit ' sys.exit() if not os.path.isdir('database/'): os.mkdir('database/') if _arc_identify: arc_ex=re.sub('.fits', '.ms.fits', arcfile) print '\n### arcfile : ',arcfile print '\n### arcfile extraction : ',arc_ex iraf.specred.apall(arcfile, output='', line = 'INDEF', nsum=10, interactive='no', extract='yes',find='yes', nfind=1 ,format='multispec', trace='no',back='no',recen='no') iraf.longslit.identify(images=arc_ex, section=inst.get('section'),coordli=inst.get('line_list'),function = 'spline3',order=3, mode='h') else: arcref = inst.get('archive_arc_extracted') arcrefid = inst.get('archive_arc_extracted_id') os.system('cp ' + arcref + ' .') arcref = string.split(arcref, '/')[-1] os.system('cp ' + arcrefid + ' ./database') arc_ex=re.sub('.fits', '.ms.fits', arcfile) print '\n### arcfile : ',arcfile print '\n### arcfile extraction : ',arc_ex print '\n### arc referenece : ',arcref iraf.specred.apall(arcfile, output=arc_ex, line = 'INDEF', nsum=10, interactive='no', extract='yes',find='yes', nfind=1 ,format='multispec', trace='no',back='no',recen='no') iraf.longslit.reidentify(referenc=arcref, images=arc_ex, interac='NO', section=inst.get('section'), coordli=inst.get('line_list'), shift='INDEF', search='INDEF', mode='h', verbose='YES', step=0,nsum=5, nlost=2, cradius=10, refit='yes',overrid='yes',newaps='no') #print '\n### checking sky lines ' #_skyfile = inst.get('sky_file') #shift = util.skyfrom2d(img, _skyfile,'True') #print '\n### I found a shift of : ',shift print '\n### extraction using apall' result = [] hdr_image = util.readhdr(img) _type=util.readkey3(hdr_image, 'object') if _type.startswith("arc") or _type.startswith("dflat") or _type.startswith("Dflat") or _type.startswith("Dbias") or _type.startswith("Bias"): print '\n### warning problem \n exit ' sys.exit() else: imgex = util.extractspectrum( img, dv, inst, _interactive, 'obj') print '\n### applying wavelength solution' iraf.disp(inlist=imgex, reference=arc_ex) sensfile = inst.get('archive_sens') os.system('cp ' + sensfile + ' .') sensfile = string.split(sensfile, '/')[-1] if sensfile: print '\n### sensitivity function : ',sensfile imgf = re.sub('.fits', '_f.fits', img) _extinction = inst.get('extinction_file') _observatory = inst.get('observatory') _exptime = util.readkey3(hdr, 'EXPTIME') _airmass = util.readkey3(hdr, 'AIRMASS') util.delete(imgf) dimgex='d'+imgex iraf.specred.calibrate(input=dimgex, output=imgf, sensiti=sensfile, extinct='yes', extinction=_extinction,flux='yes', ignorea='yes', airmass=_airmass, exptime=_exptime, fnu='no') imgout = imgf imgasci = re.sub('.fits', '.asci', imgout) errasci = re.sub('.fits', '_err.asci', imgout) util.delete(imgasci) 
iraf.onedspec.wspectext(imgout + '[*,1,1]', imgasci, header='no') iraf.onedspec.wspectext(imgout + '[*,1,4]', errasci, header='no') spec = np.transpose(np.genfromtxt(imgasci)) err = np.transpose(np.genfromtxt(errasci)) util.delete(errasci) final = np.transpose([spec[0], spec[1], err[1]]) np.savetxt(imgasci, final) result = result + [imgout, imgasci] result = result + [imgex] + [timg] asci_files.append(imgasci) if not os.path.isdir(_object0 + '/'): os.mkdir(_object0 + '/') for img in result: os.system('mv ' + img + ' ' + _object0 + '/') else: for img in result: os.system('mv ' + img + ' ' + _object0 + '/') if not _arc_identify: util.delete(arcref) util.delete(sensfile) util.delete(zero_file) util.delete(flat_file) util.delete(arc_ex) util.delete(arcfile) util.delete('logfile') util.delete(dimgex) util.delete('cosmic_*') print '\n### now i will merge ...' if len(asci_files) > 1: final = cs.combine_blue_red(asci_files[0], asci_files[1], _object0) print '\n### final result in folder ',_object0,' is ',_object0+'_merged.asci' return result
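# The *.asci products written by reduce() above contain three whitespace-
# separated columns: wavelength [A], the flux-calibrated spectrum and its error
# (bands 1 and 4 of the apall output).  A quick-look plot might therefore be
# (file name hypothetical):
import numpy as np
import matplotlib.pyplot as plt

wl, flux, err = np.genfromtxt('SN2023xyz_kast_blue_2023-01-01.asci', unpack=True)
plt.errorbar(wl, flux, yerr=err, lw=0.5)
plt.xlabel('Wavelength [A]')
plt.ylabel('Flux [erg / (cm2 s A)]')
plt.show()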
def correct_airmass():
    # Recompute the effective airmass of every extension of the Y*otbfmsw.fits
    # frames with iraf.setairmass, keeping the original value in AIROLD.
    # findradec() (name -> ra, dec) is assumed to be defined elsewhere in this
    # module, as are the os / pyfits / iraf imports.
    stdout = os.popen("ls Y*otbfmsw.fits").readlines()
    namelst = [i.split('\n')[0] for i in stdout]
    for i in xrange(len(namelst)):
        fits = pyfits.open(namelst[i])
        extnum = len(fits)
        objname = fits[0].header['object']
        fits.close()
        print '#' * 50
        print namelst[i], objname
        name = raw_input('please input the name of object:')
        ra, dec = findradec(name)
        print name, ra, dec
        for j in xrange(extnum):
            stdout = iraf.hselect(images=namelst[i] + '[%i]' % j,
                                  fields='airmass', expr='yes', Stdout=1)
            airold = float(stdout[0])
            print '+' * 5, namelst[i], 'ext:', j, 'airmass_old:', airold
            iraf.hedit(images=namelst[i] + '[%i]' % j, fields='airold',
                       value=airold, add='yes', addonly='yes', delete='no',
                       verify='no', show='yes', update='yes')
            iraf.hedit(images=namelst[i] + '[%i]' % j, fields='sname',
                       value=name, add='yes', addonly='yes', delete='no',
                       verify='no', show='yes', update='yes')
            iraf.hedit(images=namelst[i] + '[%i]' % j, fields='ra',
                       value=ra, add='yes', addonly='yes', delete='no',
                       verify='no', show='yes', update='yes')
            iraf.hedit(images=namelst[i] + '[%i]' % j, fields='dec',
                       value=dec, add='yes', addonly='yes', delete='no',
                       verify='no', show='yes', update='yes')
            iraf.twodspec()
            iraf.longslit(dispaxis=2, nsum=1, observatory='Lijiang',
                          extinction='onedstds$LJextinct.dat',
                          caldir='onedstds$spec50cal/')
            iraf.setairmass(images=namelst[i] + '[%i]' % j,
                            observatory='Lijiang', intype='beginning',
                            outtype='effective', ra='ra', dec='dec',
                            equinox='epoch', st='lst', ut='date-obs',
                            date='date-obs', exposure='exptime',
                            airmass='airmass', utmiddle='utmiddle', scale=750.0,
                            show='yes', override='yes', update='yes')
            print 'name airmass_new airmass_old'
            iraf.hselect(images=namelst[i] + '[%i]' % j,
                         fields='$I,airmass,airold', expr='yes')
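# Independent spot check of the effective airmass written by setairmass above,
# using astropy instead of IRAF.  The Lijiang site coordinates below are
# approximate and only for this illustration; the pipeline itself relies on the
# IRAF observatory database entry.
from astropy import units as u
from astropy.coordinates import SkyCoord, EarthLocation, AltAz
from astropy.time import Time

lijiang = EarthLocation(lat=26.70 * u.deg, lon=100.03 * u.deg, height=3200 * u.m)

def airmass_at(ra_deg, dec_deg, isot_utc):
    """Plane-parallel (sec z) airmass of a target at a given UTC instant."""
    target = SkyCoord(ra=ra_deg * u.deg, dec=dec_deg * u.deg)
    altaz = target.transform_to(AltAz(obstime=Time(isot_utc), location=lijiang))
    return float(altaz.secz)

# e.g. airmass_at(83.82, -5.39, '2020-01-01T18:30:00')  # hypothetical target/time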
def efoscfastredu(imglist, _listsens, _listarc, _ext_trace, _dispersionline, _cosmic, _interactive): # print "LOGX:: Entering `efoscfastredu` method/function in %(__file__)s" # % globals() import string import os import re import sys os.environ["PYRAF_BETA_STATUS"] = "1" try: from astropy.io import fits as pyfits except: import pyfits from ntt.util import readhdr, readkey3 import ntt import numpy as np dv = ntt.dvex() scal = np.pi / 180. if not _interactive: _interactive = False _inter = 'NO' else: _inter = 'YES' from pyraf import iraf iraf.noao(_doprint=0) iraf.imred(_doprint=0) iraf.ccdred(_doprint=0) iraf.twodspec(_doprint=0) iraf.longslit(_doprint=0) iraf.onedspec(_doprint=0) iraf.specred(_doprint=0) toforget = ['ccdproc', 'imcopy', 'specred.apall', 'longslit.identify', 'longslit.reidentify', 'specred.standard', 'longslit.fitcoords', 'onedspec.wspectext'] for t in toforget: iraf.unlearn(t) iraf.ccdred.verbose = 'no' # not print steps iraf.specred.verbose = 'no' # not print steps iraf.ccdproc.darkcor = 'no' iraf.ccdproc.fixpix = 'no' iraf.ccdproc.flatcor = 'no' iraf.ccdproc.zerocor = 'no' iraf.ccdproc.ccdtype = '' _gain = ntt.util.readkey3(ntt.util.readhdr(imglist[0]), 'gain') _ron = ntt.util.readkey3(ntt.util.readhdr(imglist[0]), 'ron') iraf.specred.apall.readnoi = _ron iraf.specred.apall.gain = _gain iraf.specred.dispaxi = 2 iraf.longslit.dispaxi = 2 iraf.longslit.mode = 'h' iraf.specred.mode = 'h' iraf.noao.mode = 'h' iraf.ccdred.instrument = "ccddb$kpno/camera.dat" iraf.set(direc=ntt.__path__[0] + '/') for img in imglist: hdr = ntt.util.readhdr(img) _tech = ntt.util.readkey3(hdr, 'tech') if _tech != 'SPECTRUM': sys.exit('error: ' + str(img) + ' is not a spectrum ') print '\n#### image name = ' + img + '\n' _grism0 = readkey3(hdr, 'grism') _filter0 = readkey3(hdr, 'filter') _slit0 = readkey3(hdr, 'slit') _object0 = readkey3(hdr, 'object') _date0 = readkey3(hdr, 'date-night') setup = (_grism0, _filter0, _slit0) _biassec0 = '[3:1010,1026:1029]' if _grism0 == 'Gr16': _trimsec0 = '[100:950,1:950]' elif _grism0 == 'Gr13': if _filter0 == 'Free': _trimsec0 = '[100:950,1:1015]' elif _filter0 == 'GG495': _trimsec0 = '[100:950,208:1015]' elif _filter0 == 'OG530': _trimsec0 = '[100:950,300:1015]' elif _grism0 == 'Gr11': _trimsec0 = '[100:950,5:1015]' else: _trimsec0 = '[100:950,5:1015]' _object0 = re.sub(' ', '', _object0) _object0 = re.sub('/', '_', _object0) nameout0 = 't' + str(_object0) + '_' + str(_date0) for _set in setup: nameout0 = nameout0 + '_' + _set nameout0 = ntt.util.name_duplicate(img, nameout0, '') timg = nameout0 if os.path.isfile(timg): os.system('rm -rf ' + timg) iraf.imcopy(img, output=timg) iraf.ccdproc(timg, output='', overscan='no', trim='yes', zerocor="no", flatcor="no", readaxi='column', trimsec=str(_trimsec0), biassec=_biassec0, Stdout=1) img = timg if _listarc: arcfile = ntt.util.searcharc(img, _listarc)[0] else: arcfile = '' if not arcfile: arcfile = ntt.util.searcharc(img, '')[0] else: iraf.ccdproc(arcfile, output='t' + arcfile, overscan='no', trim='yes', zerocor="no", flatcor="no", readaxi='column', trimsec=str(_trimsec0), biassec=str(_biassec0), Stdout=1) arcfile = 't' + arcfile if _cosmic: # print cosmic rays rejection ntt.cosmics.lacos(img, output='', gain=_gain, readn=_ron, xorder=9, yorder=9, sigclip=4.5, sigfrac=0.5, objlim=1, verbose=True, interactive=False) print '\n### cosmic rays rejections ........ 
done ' if not arcfile: print '\n### warning no arcfile \n exit ' else: arcref = ntt.util.searcharc(img, '')[0] if arcfile[0] == '/': os.system('cp ' + arcfile + ' ' + string.split(arcfile, '/')[-1]) arcfile = string.split(arcfile, '/')[-1] arcref = string.split(arcref, '/')[-1] if arcref: os.system('cp ' + arcref + ' .') arcref = string.split(arcref, '/')[-1] if not os.path.isdir('database/'): os.mkdir('database/') if os.path.isfile(ntt.util.searcharc(img, '')[1] + '/database/id' + re.sub('.fits', '', arcref)): os.system('cp ' + ntt.util.searcharc(img, '')[1] + '/database/id' + re.sub('.fits', '', arcref) + ' database/') iraf.longslit.reidentify(referenc=arcref, images=arcfile, interac=_inter, section='column 10', coordli='direc$standard/ident/Lines_HgCdHeNeAr600.dat', overrid='yes', step=0, newaps='no', nsum=5, nlost=2, mode='h', verbose='no') else: iraf.longslit.identify(images=arcfile, section='column 10', coordli='direc$standard/ident/Lines_HgCdHeNeAr600.dat', nsum=10, fwidth=7, order=3, mode='h') iraf.longslit.reident(referenc=arcfile, images=arcfile, interac='NO', section='column 10', coordli='direc$standard/ident/Lines_HgCdHeNeAr600.dat', overrid='yes', step=10, newaps='yes', nsum=5, nlost=2, mode='h', verbose='no') qqq = iraf.longslit.fitcoords(images=re.sub('.fits', '', arcfile), fitname=re.sub('.fits', '', arcfile), interac='no', combine='yes', databas='database', function='legendre', yorder=4, logfile='logfile', plotfil='', mode='h') iraf.specred.transform(input=img, output=img, minput='', fitnames=re.sub('.fits', '', arcfile), databas='database', x1='INDEF', x2='INDEF', y1='INDEF', y2='INDEF', flux='yes', mode='h', logfile='logfile') # ###################### check wavelength calibration ############ _skyfile = ntt.__path__[ 0] + '/standard/ident/sky_' + setup[0] + '_' + setup[1] + '.fits' shift = ntt.efoscspec2Ddef.skyfrom2d(img, _skyfile) print '\n### check in wavelengh performed ...... spectrum shifted of ' + str(shift) + ' Angstrom \n' zro = pyfits.open(img)[0].header.get('CRVAL2') ntt.util.updateheader(img, 0, {'CRVAL2': [zro + int(shift), '']}) std, rastd, decstd, magstd = ntt.util.readstandard( 'standard_efosc_mab.txt') hdrt = readhdr(img) _ra = readkey3(hdrt, 'RA') _dec = readkey3(hdrt, 'DEC') _object = readkey3(hdrt, 'object') dd = np.arccos(np.sin(_dec * scal) * np.sin(decstd * scal) + np.cos(_dec * scal) * np.cos(decstd * scal) * np.cos((_ra - rastd) * scal)) * ((180 / np.pi) * 3600) if min(dd) < 100: _type = 'stdsens' ntt.util.updateheader( img, 0, {'stdname': [std[np.argmin(dd)], '']}) ntt.util.updateheader( img, 0, {'magstd': [float(magstd[np.argmin(dd)]), '']}) else: _type = 'obj' print '\n### EXTRACTION USING IRAF TASK APALL \n' result = [] if _type == 'obj': imgex = ntt.util.extractspectrum( img, dv, _ext_trace, _dispersionline, _interactive, _type) ntt.util.updateheader( imgex, 0, {'FILETYPE': [22107, 'extracted 1D spectrum ']}) ntt.util.updateheader(imgex, 0, { 'PRODCATG': ['SCIENCE.' 
+ readkey3(readhdr(imgex), 'tech').upper(), 'Data product category']}) ntt.util.updateheader(imgex, 0, {'TRACE1': [img, '']}) result.append(imgex) if _listsens: sensfile = ntt.util.searchsens(img, _listsens)[0] else: sensfile = '' if not sensfile: sensfile = ntt.util.searchsens(img, '')[0] if sensfile: imgf = re.sub('.fits', '_f.fits', img) _extinctdir = 'direc$standard/extinction/' _extinction = 'extinction_lasilla.dat' _observatory = 'lasilla' _exptime = readkey3(hdrt, 'exptime') _airmass = readkey3(hdrt, 'airmass') ntt.util.delete(imgf) iraf.specred.calibrate(input=imgex, output=imgf, sensiti=sensfile, extinct='yes', flux='yes', ignorea='yes', extinction=_extinctdir + _extinction, observatory=_observatory, airmass=_airmass, exptime=_exptime, fnu='no') hedvec = {'SENSFUN': [string.split(sensfile, '/')[-1], 'sensitivity function'], 'FILETYPE': [22208, '1D wavelength and flux calibrated spectrum '], 'SNR': [ntt.util.StoN2(imgf, False), 'Average S/N ratio'], 'BUNIT': ['erg/cm2/s/Angstrom', 'Flux Calibration Units'], 'TRACE1': [imgex, '']} ntt.util.updateheader(imgf, 0, hedvec) imgout = imgf imgd = ntt.efoscspec1Ddef.fluxcalib2d(img, sensfile) ntt.util.updateheader( imgd, 0, {'FILETYPE': [22209, '2D wavelength and flux calibrated spectrum ']}) ntt.util.updateheader(imgd, 0, {'TRACE1': [img, '']}) imgasci = re.sub('.fits', '.asci', imgout) ntt.util.delete(imgasci) iraf.onedspec.wspectext( imgout + '[*,1,1]', imgasci, header='no') result = result + [imgout, imgd, imgasci] else: imgex = ntt.util.extractspectrum( img, dv, _ext_trace, _dispersionline, _interactive, 'std') imgout = ntt.efoscspec1Ddef.sensfunction( imgex, 'spline3', 6, _inter) result = result + [imgout] for img in result: if img[-5:] == '.fits': ntt.util.phase3header(img) # phase 3 definitions ntt.util.airmass(img) # phase 3 definitions ntt.util.updateheader( img, 0, {'quality': ['Rapid', 'Final or Rapid reduction']}) return result
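# The standard-star match in efoscfastredu() uses the spherical law of cosines:
# for two positions (ra1, dec1), (ra2, dec2) in degrees, the separation in
# arcsec is 3600 * (180/pi) * arccos(sin d1 sin d2 + cos d1 cos d2 cos(ra1-ra2)),
# with all angles first converted to radians.  Stand-alone check with numpy:
import numpy as np

def sep_arcsec(ra1, dec1, ra2, dec2):
    scal = np.pi / 180.
    return np.arccos(np.sin(dec1 * scal) * np.sin(dec2 * scal) +
                     np.cos(dec1 * scal) * np.cos(dec2 * scal) *
                     np.cos((ra1 - ra2) * scal)) / scal * 3600.

# sep_arcsec(150.00, -30.0, 150.01, -30.0) -> ~31.2 arcsec (0.01 deg * cos(dec))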

def reduce(imglist, files_arc, files_flat, _cosmic, _interactive_extraction, _arc):
    import string
    import os
    import re
    import sys
    import pdb
    os.environ["PYRAF_BETA_STATUS"] = "1"
    try:
        from astropy.io import fits as pyfits
    except:
        import pyfits
    import numpy as np
    import util
    import instruments
    import combine_sides as cs
    import cosmics
    from pyraf import iraf

    dv = util.dvex()
    scal = np.pi / 180.

    if not _interactive_extraction:
        _interactive = False
    else:
        _interactive = True

    if not _arc:
        _arc_identify = False
    else:
        _arc_identify = True

    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    iraf.twodspec(_doprint=0)
    iraf.longslit(_doprint=0)
    iraf.onedspec(_doprint=0)
    iraf.specred(_doprint=0)
    iraf.disp(inlist='1', reference='1')

    toforget = [
        'ccdproc', 'imcopy', 'specred.apall', 'longslit.identify',
        'longslit.reidentify', 'specred.standard', 'longslit.fitcoords',
        'onedspec.wspectext'
    ]
    for t in toforget:
        iraf.unlearn(t)

    iraf.ccdred.verbose = 'no'
    iraf.specred.verbose = 'no'
    iraf.ccdproc.darkcor = 'no'
    iraf.ccdproc.fixpix = 'no'
    iraf.ccdproc.flatcor = 'no'
    iraf.ccdproc.zerocor = 'no'
    iraf.ccdproc.ccdtype = ''
    iraf.longslit.mode = 'h'
    iraf.specred.mode = 'h'
    iraf.noao.mode = 'h'
    iraf.ccdred.instrument = "ccddb$kpno/camera.dat"

    # sort the arcs by arm
    list_arc_b = []
    list_arc_r = []
    for arcs in files_arc:
        hdr = util.readhdr(arcs)
        br, inst = instruments.blue_or_red(arcs)
        if br == 'blue':
            list_arc_b.append(arcs)
        elif br == 'red':
            list_arc_r.append(arcs)
        else:
            errStr = '{} '.format(str(util.readkey3(hdr, 'VERSION')))
            errStr += 'not in database'
            print(errStr)
            sys.exit()

    asci_files = []
    newlist = [[], []]

    print('\n### images to reduce :', imglist)
    # raise TypeError
    for img in imglist:
        if 'b' in img:
            newlist[0].append(img)
        elif 'r' in img:
            newlist[1].append(img)

    if len(newlist[1]) < 1:
        newlist = newlist[:-1]
    elif len(newlist[0]) < 1:
        newlist = newlist[1:]
    else:
        sides = raw_input("Reduce which side? ([both]/b/r): ")
        if sides == 'b':
            newlist = newlist[:-1]
        elif sides == 'r':
            newlist = newlist[1:]

    for imgs in newlist:
        hdr = util.readhdr(imgs[0])
        br, inst = instruments.blue_or_red(imgs[0])
        if br == 'blue':
            flat_file = '../RESP_blue'
        elif br == 'red':
            flat_file = '../RESP_red'
        else:
            errStr = 'Not in instrument list'
            print(errStr)
            sys.exit()

        iraf.specred.dispaxi = inst.get('dispaxis')
        iraf.longslit.dispaxi = inst.get('dispaxis')

        _gain = inst.get('gain')
        _ron = inst.get('read_noise')
        iraf.specred.apall.readnoi = _ron
        iraf.specred.apall.gain = _gain

        _object0 = util.readkey3(hdr, 'OBJECT')
        _date0 = util.readkey3(hdr, 'DATE-OBS')
        _object0 = re.sub(' ', '', _object0)
        _object0 = re.sub('/', '_', _object0)
        nameout0 = str(_object0) + '_' + inst.get('name') + '_' + str(_date0)
        nameout0 = util.name_duplicate(imgs[0], nameout0, '')
        timg = nameout0
        print('\n### now processing :', timg, ' for -> ', inst.get('name'))

        if len(imgs) > 1:
            img_str = ''
            for i in imgs:
                img_str = img_str + i + ','
            iraf.imcombine(img_str, output=timg)
        else:
            img = imgs[0]
            if os.path.isfile(timg):
                os.system('rm -rf ' + timg)
            iraf.imcopy(img, output=timg)

        # should just do this by hand
        iraf.ccdproc(timg, output='', overscan='no', trim='no',
                     zerocor="no", flatcor="yes", readaxi='line',
                     flat=flat_file, Stdout=1)
        img = timg
        # raw_input("Press Enter to continue...")

        if _cosmic:
            print('\n### starting cosmic removal')
            array, header = cosmics.fromfits(img)
            c = cosmics.cosmicsimage(array, gain=inst.get('gain'),
                                     readnoise=inst.get('read_noise'),
                                     sigclip=5, sigfrac=0.5, objlim=2.0)
            c.run(maxiter=5)
            cosmics.tofits('cosmic_' + img, c.cleanarray, header)
            img = 'cosmic_' + img
            print('\n### cosmic removal finished')
        else:
            print('\n### No cosmic removal; keeping the flat-fielded image for inspection')

        # pick the arc matching this arm
        if inst.get('arm') == 'blue' and len(list_arc_b) > 0:
            arcfile = list_arc_b[0]
        elif inst.get('arm') == 'red' and len(list_arc_r) > 0:
            arcfile = list_arc_r[0]
        else:
            arcfile = None

        if arcfile is not None and not arcfile.endswith(".fits"):
            arcfile = arcfile + '.fits'

        if not os.path.isdir('database/'):
            os.mkdir('database/')

        if _arc_identify:
            os.system('cp ' + arcfile + ' .')
            arcfile = string.split(arcfile, '/')[-1]
            arc_ex = re.sub('.fits', '.ms.fits', arcfile)

            arcref = inst.get('archive_arc_extracted')
            arcref_img = string.split(arcref, '/')[-1]
            arcref_img = arcref_img.replace('.ms.fits', '')
            arcrefid = inst.get('archive_arc_extracted_id')
            os.system('cp ' + arcref + ' .')
            arcref = string.split(arcref, '/')[-1]
            os.system('cp ' + arcrefid + ' ./database')
            aperture = inst.get('archive_arc_aperture')
            os.system('cp ' + aperture + ' ./database')

            print('\n### arcfile : ', arcfile)
            print('\n### arcfile extraction : ', arc_ex)
            print('\n### arc reference : ', arcref)

            # read some metadata to set the extraction row correctly
            tmpHDU = pyfits.open(arcfile)
            header = tmpHDU[0].header
            try:
                spatialBin = int(header['binning'].split(',')[0])
            except KeyError:
                spatialBin = 1
            apLine = 700 // spatialBin

            iraf.specred.apall(arcfile, output=arc_ex, ref=arcref_img,
                               line=apLine, nsum=10, interactive='no',
                               extract='yes', find='yes', nfind=1,
                               format='multispec', trace='no',
                               back='no', recen='no')

            iraf.longslit.reidentify(referenc=arcref, images=arc_ex,
                                     interac='NO', section=inst.get('section'),
                                     coordli=inst.get('line_list'),
                                     shift='INDEF', search='INDEF',
                                     mode='h', verbose='YES', step=0,
                                     nsum=5, nlost=2, cradius=10,
                                     refit='yes', overrid='yes', newaps='no')

        print('\n### extraction using apall')
        result = []
        hdr_image = util.readhdr(img)
        _type = util.readkey3(hdr_image, 'object')

        if (_type.startswith("arc") or _type.startswith("dflat")
                or _type.startswith("Dflat") or _type.startswith("Dbias")
                or _type.startswith("Bias")):
            print('\n### warning problem \n exit ')
            sys.exit()
        else:
            imgex = util.extractspectrum(img, dv, inst, _interactive, 'obj')

        print('\n### applying wavelength solution')
        print(arc_ex)
        iraf.disp(inlist=imgex, reference=arc_ex)

        result = result + [imgex] + [timg]
        # asci_files.append(imgasci)

        if not os.path.isdir(_object0 + '_ex/'):
            os.mkdir(_object0 + '_ex/')

        if not _arc_identify:
            util.delete(arcref)
        else:
            util.delete(arcfile)
            util.delete(arc_ex)
            util.delete(img)
            util.delete(imgex)
            util.delete(arcref)
            util.delete('logfile')
        # if _cosmic:
        #     util.delete(img[7:])
        #     util.delete("cosmic_*")

        os.system('mv ' + 'd' + imgex + ' ' + _object0 + '_ex/')

        use_sens = raw_input('Use archival flux calibration? [y]/n ')
        if use_sens not in ('n', 'no'):
            sensfile = inst.get('archive_sens')
            os.system('cp ' + sensfile + ' ' + _object0 + '_ex/')
            bstarfile = inst.get('archive_bstar')
            os.system('cp ' + bstarfile + ' ' + _object0 + '_ex/')

    return result

def main():
    description = "> Performs pre-reduction steps"
    usage = "%prog \t [option] \n Recommended syntax: %prog -i -c"
    parser = OptionParser(usage=usage, description=description, version="0.1")
    option, args = parser.parse_args()

    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    iraf.twodspec(_doprint=0)
    iraf.longslit(_doprint=0)
    iraf.onedspec(_doprint=0)
    iraf.specred(_doprint=0)

    iraf.ccdred.verbose = 'no'
    iraf.specred.verbose = 'no'
    iraf.ccdproc.darkcor = 'no'
    iraf.ccdproc.fixpix = 'no'
    iraf.ccdproc.flatcor = 'no'
    iraf.ccdproc.zerocor = 'no'
    iraf.ccdproc.ccdtype = ''
    iraf.longslit.mode = 'h'
    iraf.specred.mode = 'h'
    iraf.noao.mode = 'h'
    iraf.ccdred.instrument = "ccddb$kpno/camera.dat"

    mkarc = raw_input("Make arc? ([y]/n): ")
    mkflat = raw_input("Make flat? ([y]/n): ")

    if len(args) > 1:
        files = []
        sys.argv.append('--help')
        option, args = parser.parse_args()
        sys.exit()
    elif len(args) == 1:
        files = util.readlist(args[0])
        sys.exit()
    else:
        listfile = glob.glob('*.fits')
        files_science = []
        files_arc = []
        files_dflat = []
        # print 'checking your files ...'
        for img in listfile:
            _type = ''
            hdr0 = util.readhdr(img)
            _type = util.readkey3(hdr0, 'object')
            if 'flat' in _type.lower():
                files_dflat.append(img)
            elif 'arc' not in _type.lower() and 'arc' not in img.lower():
                files_science.append(img)
        if mkarc != 'n':
            mkarc_b = raw_input(
                "List blue arc files to combine (.fits will be added): ").split()
            mkarc_r = raw_input(
                "List red arc files to combine (.fits will be added): ").split()
            for arc in mkarc_b:
                files_arc.append(arc + '.fits')
            for arc in mkarc_r:
                files_arc.append(arc + '.fits')

    if mkarc != 'n':
        list_arc_b = []
        list_arc_r = []
        for arcs in files_arc:
            if instruments.blue_or_red(arcs)[0] == 'blue':
                list_arc_b.append(arcs)
            elif instruments.blue_or_red(arcs)[0] == 'red':
                list_arc_r.append(arcs)
            else:
                sys.exit()

    if mkflat != 'n':
        list_flat_b = []
        list_flat_r = []
        for dflats in files_dflat:
            if instruments.blue_or_red(dflats)[0] == 'blue':
                list_flat_b.append(dflats)
            elif instruments.blue_or_red(dflats)[0] == 'red':
                list_flat_r.append(dflats)
            else:
                sys.exit()

    # make pre_reduced if it doesn't exist
    if not os.path.isdir('pre_reduced/'):
        os.mkdir('pre_reduced/')

    # log the existing processed files (need to verify this works if pre_reduced is empty...)
    pfiles = []
    new_files = []
    for root, dirnames, filenames in os.walk('pre_reduced'):
        for file in filenames:
            if file.startswith('to'):
                pfiles.append(file)
    print(pfiles)

    # loop over each image in pre_reduced
    for img in listfile:
        hdr = util.readhdr(img)
        targ = util.readkey3(hdr, 'object')

        # if the file is not a processed file, run the overscan+trim code
        if 'to' + img not in pfiles:

            # if the file is a science file, grab the name for later
            if 'arc' not in targ.lower() and 'flat' not in targ.lower():
                new_files.append(img)
                print('Adding data for: ' + targ)

            inst = instruments.blue_or_red(img)[1]
            iraf.specred.dispaxi = inst.get('dispaxis')
            iraf.longslit.dispaxi = inst.get('dispaxis')

            _biassec0 = inst.get('biassec')
            _trimsec0 = inst.get('trimsec')

            ######################################################################
            #
            # JB: this chunk of code needs attention
            # It seems incredibly hacky for anything but Kast...
            #
            # overscan
            if not img.startswith('o') and inst.get('observatory') == 'lick':
                if os.path.isfile('pre_reduced/o' + img):
                    os.remove('pre_reduced/o' + img)
                util.kastbias(img, 'pre_reduced/o' + img)
            elif not img.startswith('o') and inst.get('observatory') != 'lick':
                if os.path.isfile('pre_reduced/o' + img):
                    os.remove('pre_reduced/o' + img)
                os.system('cp ' + img + ' ' + 'pre_reduced/' + img)

            # trim
            if not img.startswith('t') and inst.get('observatory') == 'lick':
                if os.path.isfile('pre_reduced/to' + img):
                    os.remove('pre_reduced/to' + img)
                iraf.ccdproc('pre_reduced/o' + img, output='pre_reduced/to' + img,
                             overscan='no', trim='yes', zerocor="no",
                             flatcor="no", readaxi='line',
                             trimsec=str(_trimsec0), Stdout=1)
            elif not img.startswith('t') and inst.get('observatory') != 'lick':
                if os.path.isfile('pre_reduced/to' + img):
                    os.remove('pre_reduced/to' + img)
                iraf.ccdproc('pre_reduced/' + img, output='pre_reduced/to' + img,
                             overscan='yes', trim='yes', zerocor="no",
                             flatcor="no", readaxi='line',
                             trimsec=str(_trimsec0), biassec=str(_biassec0),
                             Stdout=1)

    # combine the arcs
    if mkarc != 'n':
        # blue arcs
        if len(list_arc_b) > 0:
            if len(list_arc_b) == 1:
                arc_blue = list_arc_b[0]
                os.system('cp ' + 'pre_reduced/to' + arc_blue + ' ' +
                          'pre_reduced/ARC_blue.fits')
            else:
                arc_str = ''
                for arc in list_arc_b:
                    arc_str = arc_str + 'pre_reduced/to' + arc + ','
                if os.path.isfile('pre_reduced/ARC_blue.fits'):
                    os.remove('pre_reduced/ARC_blue.fits')
                iraf.imcombine(arc_str, output='pre_reduced/ARC_blue.fits')

        # red arcs
        if len(list_arc_r) > 0:
            if len(list_arc_r) == 1:
                arc_red = list_arc_r[0]
                os.system('cp ' + 'pre_reduced/to' + arc_red + ' ' +
                          'pre_reduced/ARC_red.fits')
            else:
                arc_str = ''
                for arc in list_arc_r:
                    arc_str = arc_str + 'pre_reduced/to' + arc + ','
                if os.path.isfile('pre_reduced/ARC_red.fits'):
                    os.remove('pre_reduced/ARC_red.fits')
                iraf.imcombine(arc_str, output='pre_reduced/ARC_red.fits')

    # combine the flats
    if mkflat != 'n':
        inter = 'yes'

        # blue flats
        if len(list_flat_b) > 0:
            br, inst = instruments.blue_or_red(list_flat_b[0])
            iraf.specred.dispaxi = inst.get('dispaxis')
            if len(list_flat_b) == 1:
                # Flat_blue = 'pre_reduced/to' + list_flat_b[0]
                Flat_blue = list_flat_b[0]
            else:
                flat_str = ''
                for flat in list_flat_b:
                    flat_str = flat_str + 'pre_reduced/to' + flat + ','
                # subsets = 'no'
                if os.path.isfile('pre_reduced/toFlat_blue'):
                    os.remove('pre_reduced/toFlat_blue')
                iraf.flatcombine(flat_str, output='pre_reduced/toFlat_blue',
                                 ccdtype='', rdnoise=3.7, subsets='no',
                                 process='no')
                Flat_blue = 'Flat_blue.fits'

            # What is the output here? Check for overwrite
            iraf.specred.response('pre_reduced/to' + Flat_blue,
                                  normaliz='pre_reduced/to' + Flat_blue,
                                  response='pre_reduced/RESP_blue',
                                  interac=inter, thresho='INDEF',
                                  sample='*', naverage=2, function='legendre',
                                  low_rej=3, high_rej=3, order=60,
                                  niterat=20, grow=0, graphic='stdgraph')

        # red flats
        if len(list_flat_r) > 0:
            br, inst = instruments.blue_or_red(list_flat_r[0])
            iraf.specred.dispaxi = inst.get('dispaxis')
            if len(list_flat_r) == 1:
                # Flat_red = 'pre_reduced/to' + list_flat_r[0]
                Flat_red = list_flat_r[0]
            else:
                flat_str = ''
                for flat in list_flat_r:
                    flat_str = flat_str + 'pre_reduced/to' + flat + ','
                if os.path.isfile('pre_reduced/toFlat_red'):
                    os.remove('pre_reduced/toFlat_red')
                iraf.flatcombine(flat_str, output='pre_reduced/toFlat_red',
                                 ccdtype='', rdnoise=3.8, subsets='yes',
                                 process='no')
                Flat_red = 'Flat_red.fits'

            # What is the output here? Check for overwrite
            iraf.specred.response('pre_reduced/to' + Flat_red,
                                  normaliz='pre_reduced/to' + Flat_red,
                                  response='pre_reduced/RESP_red',
                                  interac=inter, thresho='INDEF',
                                  sample='*', naverage=2, function='legendre',
                                  low_rej=3, high_rej=3, order=80,
                                  niterat=20, grow=0, graphic='stdgraph')

    # science files should have 'to' in front now
    # this just gets the base name; the 'to' prefix is assumed below
    if new_files is not None:
        files_science = new_files

    # get all the science objects for the night
    science_targets = []
    for obj in files_science:
        hdr = util.readhdr(obj)
        _type = util.readkey3(hdr, 'object')
        science_targets.append(_type)

    # make a dir for each science object
    science_targets = set(science_targets)
    for targ in science_targets:
        if not os.path.isdir('pre_reduced/' + targ + '/'):
            os.mkdir('pre_reduced/' + targ + '/')

    # copy the files into the object dir
    for obj in files_science:
        hdr = util.readhdr(obj)
        targ = util.readkey3(hdr, 'object')
        if not obj.startswith('to'):
            os.system('cp ' + 'pre_reduced/to' + obj + ' ' +
                      'pre_reduced/' + targ + '/')
        else:
            os.system('cp ' + 'pre_reduced/' + obj + ' ' +
                      'pre_reduced/' + targ + '/')

    rawfiles = glob.glob('*.fits')
    ofiles = glob.glob('pre_reduced/o' + '*.fits')
    tfiles = glob.glob('pre_reduced/to' + '*.fits')

    # delete raw files from the pre_reduced dir
    # there shouldn't be any there, unless the overscan isn't
    # implemented for that detector
    for img in rawfiles:
        util.delete('pre_reduced/' + img)

    # delete the ofiles from the pre_reduced dir
    for img in ofiles:
        util.delete(img)