def findaperture(img, _interactive=False):
    # print "LOGX:: Entering `findaperture` method/function in %(__file__)s" %
    # globals()
    import re
    import string
    import os
    from pyraf import iraf
    import ntt

    iraf.noao(_doprint=0, Stdout=0)
    iraf.imred(_doprint=0, Stdout=0)
    iraf.specred(_doprint=0, Stdout=0)
    toforget = ['specred.apfind']
    for t in toforget:
        iraf.unlearn(t)
    iraf.specred.databas = 'database'
    iraf.specred.dispaxi = 2
    iraf.specred.apedit.thresho = 0
    dv = ntt.dvex()
    grism = ntt.util.readkey3(ntt.util.readhdr(img), 'grism')
    if _interactive:
        _interac = 'yes'
        _edit = 'yes'
    else:
        _interac = 'no'
        _edit = 'no'
    if os.path.isfile('database/ap' + re.sub('.fits', '', img)):
        ntt.util.delete('database/ap' + re.sub('.fits', '', img))
    xx = iraf.specred.apfind(img, interac=_interac, find='yes', recenter='yes',
                             edit=_edit, resize='no', aperture=1, Stdout=1,
                             nfind=1, line=dv['line'][grism], nsum=50, mode='h')
    try:
        for line in open('database/ap' + re.sub('.fits', '', img)):
            if "center" in line:
                center = float(string.split(line)[1])
    except:
        center = 9999
    return center
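# Usage sketch (illustrative only): locate the aperture centre of a merged,
# wavelength-calibrated SOFI frame without interaction. The file name
# 'object_20120101_GB_merge_55927.fits' is hypothetical, and the sketch
# assumes this module is importable as ntt.sofispec1Ddef with PyRAF and a
# writable 'database/' directory available in the working directory.
#
#   from ntt import sofispec1Ddef
#   center = sofispec1Ddef.findaperture('object_20120101_GB_merge_55927.fits',
#                                       _interactive=False)
#   print 'aperture centre along x (pixel):', center   # 9999 means no aperture was found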
def efoscfastredu(imglist, _listsens, _listarc, _ext_trace, _dispersionline,
                  _cosmic, _interactive):
    # print "LOGX:: Entering `efoscfastredu` method/function in %(__file__)s"
    # % globals()
    import string
    import os
    import re
    import sys
    os.environ["PYRAF_BETA_STATUS"] = "1"
    try:
        from astropy.io import fits as pyfits
    except:
        import pyfits
    from ntt.util import readhdr, readkey3
    import ntt
    import numpy as np

    dv = ntt.dvex()
    scal = np.pi / 180.
    if not _interactive:
        _interactive = False
        _inter = 'NO'
    else:
        _inter = 'YES'
    from pyraf import iraf
    iraf.noao(_doprint=0, Stdout=0)
    iraf.imred(_doprint=0, Stdout=0)
    iraf.ccdred(_doprint=0, Stdout=0)
    iraf.twodspec(_doprint=0, Stdout=0)
    iraf.longslit(_doprint=0, Stdout=0)
    iraf.onedspec(_doprint=0, Stdout=0)
    iraf.specred(_doprint=0, Stdout=0)
    toforget = ['ccdproc', 'imcopy', 'specred.apall', 'longslit.identify',
                'longslit.reidentify', 'specred.standard',
                'longslit.fitcoords', 'onedspec.wspectext']
    for t in toforget:
        iraf.unlearn(t)
    iraf.ccdred.verbose = 'no'  # not print steps
    iraf.specred.verbose = 'no'  # not print steps
    iraf.ccdproc.darkcor = 'no'
    iraf.ccdproc.fixpix = 'no'
    iraf.ccdproc.flatcor = 'no'
    iraf.ccdproc.zerocor = 'no'
    iraf.ccdproc.ccdtype = ''
    _gain = ntt.util.readkey3(ntt.util.readhdr(imglist[0]), 'gain')
    _ron = ntt.util.readkey3(ntt.util.readhdr(imglist[0]), 'ron')
    iraf.specred.apall.readnoi = _ron
    iraf.specred.apall.gain = _gain
    iraf.specred.dispaxi = 2
    iraf.longslit.dispaxi = 2
    iraf.longslit.mode = 'h'
    iraf.specred.mode = 'h'
    iraf.noao.mode = 'h'
    iraf.ccdred.instrument = "ccddb$kpno/camera.dat"
    iraf.set(direc=ntt.__path__[0] + '/')
    for img in imglist:
        hdr = ntt.util.readhdr(img)
        _tech = ntt.util.readkey3(hdr, 'tech')
        if _tech != 'SPECTRUM':
            sys.exit('error: ' + str(img) + ' is not a spectrum ')
        print '\n#### image name = ' + img + '\n'
        _grism0 = readkey3(hdr, 'grism')
        _filter0 = readkey3(hdr, 'filter')
        _slit0 = readkey3(hdr, 'slit')
        _object0 = readkey3(hdr, 'object')
        _date0 = readkey3(hdr, 'date-night')
        setup = (_grism0, _filter0, _slit0)
        _biassec0 = '[3:1010,1026:1029]'
        if _grism0 == 'Gr16':
            _trimsec0 = '[100:950,1:950]'
        elif _grism0 == 'Gr13':
            if _filter0 == 'Free':
                _trimsec0 = '[100:950,1:1015]'
            elif _filter0 == 'GG495':
                _trimsec0 = '[100:950,208:1015]'
            elif _filter0 == 'OG530':
                _trimsec0 = '[100:950,300:1015]'
        elif _grism0 == 'Gr11':
            _trimsec0 = '[100:950,5:1015]'
        else:
            _trimsec0 = '[100:950,5:1015]'
        _object0 = re.sub(' ', '', _object0)
        _object0 = re.sub('/', '_', _object0)
        nameout0 = 't' + str(_object0) + '_' + str(_date0)
        for _set in setup:
            nameout0 = nameout0 + '_' + _set
        nameout0 = ntt.util.name_duplicate(img, nameout0, '')
        timg = nameout0
        if os.path.isfile(timg):
            os.system('rm -rf ' + timg)
        iraf.imcopy(img, output=timg)
        iraf.ccdproc(timg, output='', overscan='no', trim='yes', zerocor="no",
                     flatcor="no", readaxi='column', trimsec=str(_trimsec0),
                     biassec=_biassec0, Stdout=1)
        img = timg
        if _listarc:
            arcfile = ntt.util.searcharc(img, _listarc)[0]
        else:
            arcfile = ''
        if not arcfile:
            arcfile = ntt.util.searcharc(img, '')[0]
        else:
            iraf.ccdproc(arcfile, output='t' + arcfile, overscan='no',
                         trim='yes', zerocor="no", flatcor="no",
                         readaxi='column', trimsec=str(_trimsec0),
                         biassec=str(_biassec0), Stdout=1)
            arcfile = 't' + arcfile
        if _cosmic:
            # print cosmic rays rejection
            ntt.cosmics.lacos(img, output='', gain=_gain, readn=_ron,
                              xorder=9, yorder=9, sigclip=4.5, sigfrac=0.5,
                              objlim=1, verbose=True, interactive=False)
            print '\n### cosmic ray rejection ........ done '
        if not arcfile:
            print '\n### warning: no arcfile \n exit '
        else:
            arcref = ntt.util.searcharc(img, '')[0]
            if arcfile[0] == '/':
                os.system('cp ' + arcfile + ' ' + string.split(arcfile, '/')[-1])
                arcfile = string.split(arcfile, '/')[-1]
            arcref = string.split(arcref, '/')[-1]
            if arcref:
                os.system('cp ' + arcref + ' .')
                arcref = string.split(arcref, '/')[-1]
                if not os.path.isdir('database/'):
                    os.mkdir('database/')
                if os.path.isfile(ntt.util.searcharc(img, '')[1] + '/database/id' +
                                  re.sub('.fits', '', arcref)):
                    os.system('cp ' + ntt.util.searcharc(img, '')[1] +
                              '/database/id' + re.sub('.fits', '', arcref) +
                              ' database/')
                iraf.longslit.reidentify(referenc=arcref, images=arcfile,
                                         interac=_inter, section='column 10',
                                         coordli='direc$standard/ident/Lines_HgCdHeNeAr600.dat',
                                         overrid='yes', step=0, newaps='no',
                                         nsum=5, nlost=2, mode='h', verbose='no')
            else:
                iraf.longslit.identify(images=arcfile, section='column 10',
                                       coordli='direc$standard/ident/Lines_HgCdHeNeAr600.dat',
                                       nsum=10, fwidth=7, order=3, mode='h')
                iraf.longslit.reident(referenc=arcfile, images=arcfile,
                                      interac='NO', section='column 10',
                                      coordli='direc$standard/ident/Lines_HgCdHeNeAr600.dat',
                                      overrid='yes', step=10, newaps='yes',
                                      nsum=5, nlost=2, mode='h', verbose='no')
            qqq = iraf.longslit.fitcoords(images=re.sub('.fits', '', arcfile),
                                          fitname=re.sub('.fits', '', arcfile),
                                          interac='no', combine='yes',
                                          databas='database',
                                          function='legendre', yorder=4,
                                          logfile='logfile', plotfil='', mode='h')
            iraf.specred.transform(input=img, output=img, minput='',
                                   fitnames=re.sub('.fits', '', arcfile),
                                   databas='database', x1='INDEF', x2='INDEF',
                                   y1='INDEF', y2='INDEF', flux='yes',
                                   mode='h', logfile='logfile')
            # ###################### check wavelength calibration ############
            _skyfile = ntt.__path__[0] + '/standard/ident/sky_' + \
                setup[0] + '_' + setup[1] + '.fits'
            shift = ntt.efoscspec2Ddef.skyfrom2d(img, _skyfile)
            print '\n### check in wavelength performed ...... spectrum shifted by ' + \
                str(shift) + ' Angstrom \n'
            zro = pyfits.open(img)[0].header.get('CRVAL2')
            ntt.util.updateheader(img, 0, {'CRVAL2': [zro + int(shift), '']})
            std, rastd, decstd, magstd = ntt.util.readstandard(
                'standard_efosc_mab.txt')
            hdrt = readhdr(img)
            _ra = readkey3(hdrt, 'RA')
            _dec = readkey3(hdrt, 'DEC')
            _object = readkey3(hdrt, 'object')
            dd = np.arccos(np.sin(_dec * scal) * np.sin(decstd * scal) +
                           np.cos(_dec * scal) * np.cos(decstd * scal) *
                           np.cos((_ra - rastd) * scal)) * ((180 / np.pi) * 3600)
            if min(dd) < 100:
                _type = 'stdsens'
                ntt.util.updateheader(img, 0, {'stdname': [std[np.argmin(dd)], '']})
                ntt.util.updateheader(
                    img, 0, {'magstd': [float(magstd[np.argmin(dd)]), '']})
            else:
                _type = 'obj'
            print '\n### EXTRACTION USING IRAF TASK APALL \n'
            result = []
            if _type == 'obj':
                imgex = ntt.util.extractspectrum(img, dv, _ext_trace,
                                                 _dispersionline, _interactive,
                                                 _type)
                ntt.util.updateheader(
                    imgex, 0, {'FILETYPE': [22107, 'extracted 1D spectrum ']})
                ntt.util.updateheader(imgex, 0, {
                    'PRODCATG': ['SCIENCE.' +
                                 readkey3(readhdr(imgex), 'tech').upper(),
                                 'Data product category']})
                ntt.util.updateheader(imgex, 0, {'TRACE1': [img, '']})
                result.append(imgex)
                if _listsens:
                    sensfile = ntt.util.searchsens(img, _listsens)[0]
                else:
                    sensfile = ''
                if not sensfile:
                    sensfile = ntt.util.searchsens(img, '')[0]
                if sensfile:
                    imgf = re.sub('.fits', '_f.fits', img)
                    _extinctdir = 'direc$standard/extinction/'
                    _extinction = 'extinction_lasilla.dat'
                    _observatory = 'lasilla'
                    _exptime = readkey3(hdrt, 'exptime')
                    _airmass = readkey3(hdrt, 'airmass')
                    ntt.util.delete(imgf)
                    iraf.specred.calibrate(input=imgex, output=imgf,
                                           sensiti=sensfile, extinct='yes',
                                           flux='yes', ignorea='yes',
                                           extinction=_extinctdir + _extinction,
                                           observatory=_observatory,
                                           airmass=_airmass, exptime=_exptime,
                                           fnu='no')
                    hedvec = {'SENSFUN': [string.split(sensfile, '/')[-1],
                                          'sensitivity function'],
                              'FILETYPE': [22208, '1D wavelength and flux calibrated spectrum '],
                              'SNR': [ntt.util.StoN2(imgf, False), 'Average S/N ratio'],
                              'BUNIT': ['erg/cm2/s/Angstrom', 'Flux Calibration Units'],
                              'TRACE1': [imgex, '']}
                    ntt.util.updateheader(imgf, 0, hedvec)
                    imgout = imgf
                    imgd = ntt.efoscspec1Ddef.fluxcalib2d(img, sensfile)
                    ntt.util.updateheader(imgd, 0, {
                        'FILETYPE': [22209, '2D wavelength and flux calibrated spectrum ']})
                    ntt.util.updateheader(imgd, 0, {'TRACE1': [img, '']})
                    imgasci = re.sub('.fits', '.asci', imgout)
                    ntt.util.delete(imgasci)
                    iraf.onedspec.wspectext(imgout + '[*,1,1]', imgasci, header='no')
                    result = result + [imgout, imgd, imgasci]
            else:
                imgex = ntt.util.extractspectrum(img, dv, _ext_trace,
                                                 _dispersionline, _interactive,
                                                 'std')
                imgout = ntt.efoscspec1Ddef.sensfunction(imgex, 'spline3', 6, _inter)
                result = result + [imgout]
    for img in result:
        if img[-5:] == '.fits':
            ntt.util.phase3header(img)  # phase 3 definitions
            ntt.util.airmass(img)  # phase 3 definitions
            ntt.util.updateheader(
                img, 0, {'quality': ['Rapid', 'Final or Rapid reduction']})
    return result
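# Usage sketch (illustrative only): a 'Rapid' reduction of a single EFOSC
# spectrum with cosmic-ray cleaning, letting the task pick the arc and
# sensitivity files from the archive when the input lists are empty. The
# frame name is hypothetical, and the sketch assumes efoscfastredu is exposed
# at the ntt package level (adjust the import to wherever it lives in your
# checkout) on top of a configured PyRAF/IRAF installation.
#
#   import ntt
#   products = ntt.efoscfastredu(['EFOSC.2012-01-01T01:00:00.000.fits'],
#                                _listsens='', _listarc='', _ext_trace=False,
#                                _dispersionline=False, _cosmic=True,
#                                _interactive=False)
#   print products   # extracted, flux-calibrated and ascii spectra, flagged quality='Rapid'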
def efoscspec1Dredu(files, _interactive, _ext_trace, _dispersionline,
                    liststandard, listatmo0, _automaticex, _verbose=False):
    # print "LOGX:: Entering `efoscspec1Dredu` method/function in
    # %(__file__)s" % globals()
    import ntt
    try:
        import pyfits
    except:
        from astropy.io import fits as pyfits
    import re
    import string
    import sys
    import os
    import numpy as np

    os.environ["PYRAF_BETA_STATUS"] = "1"
    _extinctdir = 'direc$standard/extinction/'
    _extinction = 'lasilla2.txt'
    _observatory = 'lasilla'
    import datetime
    now = datetime.datetime.now()
    datenow = now.strftime('20%y%m%d%H%M')
    MJDtoday = 55927 + (datetime.date.today() -
                        datetime.date(2012, 01, 01)).days
    dv = ntt.dvex()
    scal = np.pi / 180.
    _gain = ntt.util.readkey3(ntt.util.readhdr(
        re.sub('\n', '', files[0])), 'gain')
    _rdnoise = ntt.util.readkey3(ntt.util.readhdr(
        re.sub('\n', '', files[0])), 'ron')
    std, rastd, decstd, magstd = ntt.util.readstandard(
        'standard_efosc_mab.txt')
    objectlist = {}
    for img in files:
        hdr = ntt.util.readhdr(img)
        img = re.sub('\n', '', img)
        ntt.util.correctcard(img)
        _ra = ntt.util.readkey3(hdr, 'RA')
        _dec = ntt.util.readkey3(hdr, 'DEC')
        _object = ntt.util.readkey3(hdr, 'object')
        _grism = ntt.util.readkey3(hdr, 'grism')
        _filter = ntt.util.readkey3(hdr, 'filter')
        _slit = ntt.util.readkey3(hdr, 'slit')
        dd = np.arccos(np.sin(_dec * scal) * np.sin(decstd * scal) +
                       np.cos(_dec * scal) * np.cos(decstd * scal) *
                       np.cos((_ra - rastd) * scal)) * ((180 / np.pi) * 3600)
        if min(dd) < 100:
            _type = 'stdsens'
        else:
            _type = 'obj'
        if min(dd) < 100:
            ntt.util.updateheader(img, 0, {'stdname': [std[np.argmin(dd)], '']})
            ntt.util.updateheader(
                img, 0, {'magstd': [float(magstd[np.argmin(dd)]), '']})
        if _type not in objectlist:
            objectlist[_type] = {}
        if (_grism, _filter, _slit) not in objectlist[_type]:
            objectlist[_type][_grism, _filter, _slit] = [img]
        else:
            objectlist[_type][_grism, _filter, _slit].append(img)
    from pyraf import iraf
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.specred(_doprint=0)
    iraf.imutil(_doprint=0)
    toforget = ['imutil.imcopy', 'specred.sarith', 'specred.standard']
    for t in toforget:
        iraf.unlearn(t)
    iraf.specred.verbose = 'no'
    iraf.specred.dispaxi = 2
    iraf.set(direc=ntt.__path__[0] + '/')
    sens = {}
    print objectlist
    outputfile = []
    if 'obj' in objectlist.keys():
        tpe = 'obj'
    elif 'stdsens' in objectlist.keys():
        tpe = 'stdsens'
    else:
        sys.exit('error: no objects and no standards in the list')
    for setup in objectlist[tpe]:
        extracted = []
        listatmo = []
        if setup not in sens:
            sens[setup] = []
        if tpe == 'obj':
            print '\n### setup= ', setup, '\n### objects= ', objectlist['obj'][setup], '\n'
            for img in objectlist['obj'][setup]:
                # hdr=readhdr(img)
                print '\n\n### next object= ', img, ' ', ntt.util.readkey3(
                    ntt.util.readhdr(img), 'object'), '\n'
                if os.path.isfile(re.sub('.fits', '_ex.fits', img)):
                    if ntt.util.readkey3(
                            ntt.util.readhdr(re.sub('.fits', '_ex.fits', img)),
                            'quality') == 'Rapid':
                        ntt.util.delete(re.sub('.fits', '_ex.fits', img))
                imgex = ntt.util.extractspectrum(img, dv, _ext_trace,
                                                 _dispersionline, _interactive,
                                                 'obj', automaticex=_automaticex)
                if not os.path.isfile(imgex):
                    sys.exit('### error, extraction not computed')
                if not ntt.util.readkey3(ntt.util.readhdr(imgex), 'shift') and \
                        ntt.util.readkey3(ntt.util.readhdr(imgex), 'shift') != 0.0:
                    # if not readkey3(readhdr(imgex),'shift'):
                    ntt.efoscspec1Ddef.checkwavestd(imgex, _interactive)
                extracted.append(imgex)
                if imgex not in outputfile:
                    outputfile.append(imgex)
                ntt.util.updateheader(
                    imgex, 0, {'FILETYPE': [22107, 'extracted 1D spectrum ']})
                ntt.util.updateheader(imgex, 0, {
                    'PRODCATG': ['SCIENCE.' + ntt.util.readkey3(
                        ntt.util.readhdr(imgex), 'tech').upper(),
                        'Data product category']})
                ntt.util.updateheader(imgex, 0, {'TRACE1': [img, 'Originating file']})
                if os.path.isfile('database/ap' + re.sub('_ex.fits', '', imgex)):
                    if 'database/ap' + re.sub('_ex.fits', '', imgex) not in outputfile:
                        outputfile.append('database/ap' + re.sub('_ex.fits', '', imgex))
            print '\n### all objects with this setup extracted\n'
        if liststandard:
            standardlist = liststandard
            _type = 'stdfromdreducer'
        else:
            try:
                standardlist = objectlist['stdsens'][setup]
                _type = 'stdsens'
            except:
                standardlist = ''
                _type = ''
        if _type == 'stdfromdreducer' and len(extracted) >= 1:
            _outputsens2 = ntt.util.searchsens(extracted[0], standardlist)[0]
            print '\n### using standard from reducer ' + str(_outputsens2)
        elif _type not in ['stdsens', 'stdfromdreducer'] and len(extracted) >= 1:
            _outputsens2 = ntt.util.searchsens(extracted[0], '')[0]
            os.system('cp ' + _outputsens2 + ' .')
            _outputsens2 = string.split(_outputsens2, '/')[-1]
            print '\n### no standard in the list, using standard from archive'
        else:
            for simg in standardlist:
                print '\n### standard for setup ' + str(setup) + ' = ', simg, ' ', \
                    ntt.util.readkey3(ntt.util.readhdr(simg), 'object'), '\n'
                simgex = ntt.util.extractspectrum(simg, dv, False, False,
                                                  _interactive, 'std',
                                                  automaticex=_automaticex)
                ntt.util.updateheader(
                    simgex, 0, {'FILETYPE': [22107, 'extracted 1D spectrum']})
                ntt.util.updateheader(simgex, 0, {
                    'PRODCATG': ['SCIENCE.' + ntt.util.readkey3(
                        ntt.util.readhdr(simgex), 'tech').upper(),
                        'Data product category']})
                ntt.util.updateheader(simgex, 0, {'TRACE1': [simg, 'Originating file']})
                if not ntt.util.readkey3(ntt.util.readhdr(simgex), 'shift') and \
                        ntt.util.readkey3(ntt.util.readhdr(simgex), 'shift') != 0.0:
                    # if not readkey3(readhdr(simgex),'shift'):
                    ntt.efoscspec1Ddef.checkwavestd(simgex, _interactive)
                atmofile = ntt.efoscspec1Ddef.telluric_atmo(simgex)  # atmo file2
                ntt.util.updateheader(atmofile, 0, {'TRACE1': [simgex, 'Originating file']})
                ntt.util.updateheader(
                    atmofile, 0, {'FILETYPE': [21211, 'telluric correction 1D spectrum ']})
                if tpe != 'obj' and atmofile not in outputfile:
                    outputfile.append(atmofile)
                if not listatmo0:
                    listatmo.append(atmofile)
                sens[setup].append(simgex)
                if simgex not in outputfile:
                    outputfile.append(simgex)
                if setup[0] == 'Gr13' and setup[1] == 'Free':
                    if os.path.isfile(re.sub('Free', 'GG495', simg)):
                        print '\n### extract standard frame with blocking filter to correct for second order contamination\n'
                        simg2 = re.sub('Free', 'GG495', simg)
                        simgex2 = ntt.util.extractspectrum(
                            simg2, dv, False, False, _interactive, 'std',
                            automaticex=_automaticex)
                        ntt.util.updateheader(
                            simgex2, 0, {'FILETYPE': [22107, 'extracted 1D spectrum']})
                        ntt.util.updateheader(simgex2, 0, {
                            'PRODCATG': ['SCIENCE.' + ntt.util.readkey3(
                                ntt.util.readhdr(simgex2), 'tech').upper(),
                                'Data product category']})
                        if not ntt.util.readkey3(ntt.util.readhdr(simgex2), 'shift') and \
                                ntt.util.readkey3(ntt.util.readhdr(simgex2), 'shift') != 0.0:
                            # if not readkey3(readhdr(simgex2),'shift'):
                            ntt.efoscspec1Ddef.checkwavestd(simgex2, _interactive)
                        ntt.util.updateheader(simgex2, 0, {'TRACE1': [simg2, 'Originating file']})
            print '\n### standard available: ', sens[setup]
            if tpe == 'obj':
                if len(sens[setup]) > 1:
                    goon = 'no'
                    while goon != 'yes':
                        stdused = raw_input(
                            '\n### more than one standard for this setup, which one do you want to use [' +
                            sens[setup][0] + '] ?')
                        if not stdused:
                            stdused = sens[setup][0]
                        if os.path.isfile(stdused):
                            goon = 'yes'
                else:
                    stdused = sens[setup][0]
                stdvec = [stdused]
            else:
                stdvec = sens[setup]
            for stdused in stdvec:
                stdusedclean = re.sub('_ex', '_clean', stdused)
                ntt.util.delete(stdusedclean)
                iraf.specred.sarith(input1=stdused, op='/', input2=atmofile,
                                    output=stdusedclean, format='multispec')
                _outputsens2 = ntt.efoscspec1Ddef.sensfunction(
                    stdusedclean, 'spline3', 16, _interactive)
                ntt.util.updateheader(
                    _outputsens2, 0, {'FILETYPE': [21212, 'sensitivity function']})
                ntt.util.updateheader(
                    _outputsens2, 0, {'TRACE1': [stdused, 'Originating file']})
                if setup[0] == 'Gr13' and setup[1] == 'Free':
                    if os.path.isfile(re.sub('Free', 'GG495', stdused)):
                        print '\n### compute sensitivity function of grism 13 with blocking filter ' \
                              'to correct for second order contamination \n'
                        stdused2 = re.sub('Free', 'GG495', stdused)
                        if not ntt.util.readkey3(ntt.util.readhdr(stdused2), 'STDNAME'):
                            ntt.util.updateheader(stdused2, 0, {
                                'STDNAME': [ntt.util.readkey3(
                                    ntt.util.readhdr(stdused), 'STDNAME'), '']})
                        atmofile2 = ntt.efoscspec1Ddef.telluric_atmo(stdused2)  # atmo file2
                        stdusedclean2 = re.sub('_ex', '_clean', stdused2)
                        ntt.util.delete(stdusedclean2)
                        iraf.specred.sarith(input1=stdused2, op='/', input2=atmofile2,
                                            output=stdusedclean2, format='multispec')
                        _outputsens3 = ntt.efoscspec1Ddef.sensfunction(
                            stdusedclean2, 'spline3', 16, _interactive)
                        ntt.util.updateheader(
                            _outputsens3, 0, {'FILETYPE': [21212, 'sensitivity function']})
                        ntt.util.updateheader(
                            _outputsens3, 0, {'TRACE1': [stdused2, 'Originating file']})
                        _outputsens2 = correctsens(_outputsens2, _outputsens3)
                if _outputsens2 not in outputfile:
                    outputfile.append(_outputsens2)
        if _outputsens2 and tpe == 'obj':
            ####################################################
            for img in objectlist['obj'][setup]:
                # flux calibrate 2d images
                imgd = fluxcalib2d(img, _outputsens2)
                ntt.util.updateheader(imgd, 0, {
                    'FILETYPE': [22209, '2D wavelength and flux calibrated spectrum ']})
                ntt.util.updateheader(imgd, 0, {'TRACE1': [img, 'Originating files']})
                iraf.hedit(imgd, 'PRODCATG', delete='yes', update='yes', verify='no')
                if imgd not in outputfile:
                    outputfile.append(imgd)
            ####################################################
            # flux calib in the standard way
            if not listatmo and listatmo0:
                listatmo = listatmo0[:]
            for _imgex in extracted:
                _airmass = ntt.util.readkey3(ntt.util.readhdr(_imgex), 'airmass')
                _exptime = ntt.util.readkey3(ntt.util.readhdr(_imgex), 'exptime')
                _imgf = re.sub('_ex.fits', '_f.fits', _imgex)
                ntt.util.delete(_imgf)
                qqq = iraf.specred.calibrate(input=_imgex, output=_imgf,
                                             sensiti=_outputsens2,
                                             extinct='yes', flux='yes',
                                             extinction=_extinctdir + _extinction,
                                             observatory=_observatory,
                                             airmass=_airmass, ignorea='yes',
                                             exptime=_exptime, fnu='no')
                hedvec = {'SENSFUN': [_outputsens2, ''],
                          'FILETYPE': [22208, '1D wavelength and flux calibrated spectrum', ''],
                          # 'SNR':[ntt.util.StoN(_imgf,50),'Average signal to noise ratio per pixel'],
                          'SNR': [ntt.util.StoN2(_imgf, False),
                                  'Average signal to noise ratio per pixel'],
                          'BUNIT': ['erg/cm2/s/Angstrom', 'Physical unit of array values'],
                          'TRACE1': [_imgex, 'Originating file'],
                          'ASSON1': [re.sub('_f.fits', '_2df.fits', _imgf),
                                     'Name of associated file'],
                          'ASSOC1': ['ANCILLARY.2DSPECTRUM', 'Category of associated file']}
                ntt.util.updateheader(_imgf, 0, hedvec)
                if _imgf not in outputfile:
                    outputfile.append(_imgf)
                if listatmo:
                    atmofile = ntt.util.searcharc(_imgex, listatmo)[0]
                    if atmofile:
                        _imge = re.sub('_f.fits', '_e.fits', _imgf)
                        ntt.util.delete(_imge)
                        iraf.specred.sarith(input1=_imgf, op='/', input2=atmofile,
                                            output=_imge, w1='INDEF', w2='INDEF',
                                            format='multispec')
                        try:
                            iraf.imutil.imcopy(input=_imgf + '[*,1,2]',
                                               output=_imge + '[*,1,2]', verbose='no')
                        except:
                            pass
                        try:
                            iraf.imutil.imcopy(input=_imgf + '[*,1,3]',
                                               output=_imge + '[*,1,3]', verbose='no')
                        except:
                            pass
                        try:
                            iraf.imutil.imcopy(input=_imgf + '[*,1,4]',
                                               output=_imge + '[*,1,4]', verbose='no')
                        except:
                            pass
                        if _imge not in outputfile:
                            outputfile.append(_imge)
                        ntt.util.updateheader(_imge, 0, {
                            'FILETYPE': [22210, '1D, wave, flux calib, telluric corr.']})
                        if atmofile not in outputfile:
                            outputfile.append(atmofile)
                        ntt.util.updateheader(_imge, 0, {'ATMOFILE': [atmofile, '']})
                        ntt.util.updateheader(_imge, 0, {'TRACE1': [_imgf, 'Originating file']})
                        imgin = _imge
                    else:
                        imgin = _imgf
                else:
                    imgin = _imgf
                imgasci = re.sub('.fits', '.asci', imgin)
                ntt.util.delete(imgasci)
                iraf.onedspec(_doprint=0)
                iraf.onedspec.wspectext(imgin + '[*,1,1]', imgasci, header='no')
                if imgasci not in outputfile:
                    outputfile.append(imgasci)
    print '\n### adding keywords for phase 3 ....... '
    for img in outputfile:
        if str(img)[-5:] == '.fits':
            try:
                ntt.util.phase3header(img)  # phase 3 definitions
                ntt.util.updateheader(img, 0, {'quality': ['Final', '']})
            except:
                print 'Warning: ' + img + ' is not a fits file'
            try:
                if int(re.sub('\.', '', str(pyfits.__version__))[:2]) <= 30:
                    aa = 'HIERARCH '
                else:
                    aa = ''
            except:
                aa = ''
            imm = pyfits.open(img, mode='update')
            hdr = imm[0].header
            if aa + 'ESO DPR CATG' in hdr:
                hdr.pop(aa + 'ESO DPR CATG')
            if aa + 'ESO DPR TECH' in hdr:
                hdr.pop(aa + 'ESO DPR TECH')
            if aa + 'ESO DPR TYPE' in hdr:
                hdr.pop(aa + 'ESO DPR TYPE')
            imm.flush()
            imm.close()
    print outputfile
    reduceddata = ntt.rangedata(outputfile)
    f = open('logfile_spec1d_' + str(reduceddata) + '_' + str(datenow) +
             '.raw.list', 'w')
    for img in outputfile:
        try:
            f.write(ntt.util.readkey3(ntt.util.readhdr(img), 'arcfile') + '\n')
        except:
            pass
    f.close()
    return outputfile, 'logfile_spec1d_' + str(reduceddata) + '_' + \
        str(datenow) + '.raw.list'
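# Usage sketch (illustrative only): full 1D reduction of a night of EFOSC
# spectra. 'spectra.list' is a hypothetical text file with one 2D
# wavelength-calibrated frame per line; standard-star and telluric files are
# searched automatically when the corresponding lists are empty. Assumes the
# function is reachable as ntt.efoscspec1Ddef.efoscspec1Dredu.
#
#   from ntt import efoscspec1Ddef
#   files = open('spectra.list').readlines()
#   outputs, logfile = efoscspec1Ddef.efoscspec1Dredu(
#       files, _interactive=False, _ext_trace=False, _dispersionline=False,
#       liststandard='', listatmo0='', _automaticex=True)
#   print 'reduced products:', outputs
#   print 'log written to:', logfile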
def sofispec1Dredu(files, _interactive, _ext_trace, _dispersionline,
                   _automaticex, _verbose=False):
    # print "LOGX:: Entering `sofispec1Dredu` method/function in %(__file__)s"
    # % globals()
    import re
    import string
    import sys
    import os
    os.environ["PYRAF_BETA_STATUS"] = "1"
    import ntt
    try:
        import pyfits
    except:
        from astropy.io import fits as pyfits
    import numpy as np
    import datetime
    import pylab as pl
    from pyraf import iraf

    dv = ntt.dvex()
    now = datetime.datetime.now()
    datenow = now.strftime('20%y%m%d%H%M')
    MJDtoday = 55927 + (datetime.date.today() -
                        datetime.date(2012, 01, 01)).days
    scal = np.pi / 180.
    hdr0 = ntt.util.readhdr(re.sub('\n', '', files[0]))
    _gain = ntt.util.readkey3(hdr0, 'gain')
    _rdnoise = ntt.util.readkey3(hdr0, 'ron')
    std_sun, rastd_sun, decstd_sun, magstd_sun = ntt.util.readstandard(
        'standard_sofi_sun.txt')
    std_vega, rastd_vega, decstd_vega, magstd_vega = ntt.util.readstandard(
        'standard_sofi_vega.txt')
    std_phot, rastd_phot, decstd_phot, magstd_phot = ntt.util.readstandard(
        'standard_sofi_phot.txt')
    outputfile = []
    objectlist, RA, DEC = {}, {}, {}
    for img in files:
        img = re.sub('\n', '', img)
        hdr = ntt.util.readhdr(img)
        _ra = ntt.util.readkey3(hdr, 'RA')
        _dec = ntt.util.readkey3(hdr, 'DEC')
        _grism = ntt.util.readkey3(hdr, 'grism')
        _filter = ntt.util.readkey3(hdr, 'filter')
        _slit = ntt.util.readkey3(hdr, 'slit')
        cc_sun = np.arccos(np.sin(_dec * scal) * np.sin(decstd_sun * scal) +
                           np.cos(_dec * scal) * np.cos(decstd_sun * scal) *
                           np.cos((_ra - rastd_sun) * scal)) * ((180 / np.pi) * 3600)
        cc_vega = np.arccos(np.sin(_dec * scal) * np.sin(decstd_vega * scal) +
                            np.cos(_dec * scal) * np.cos(decstd_vega * scal) *
                            np.cos((_ra - rastd_vega) * scal)) * ((180 / np.pi) * 3600)
        cc_phot = np.arccos(np.sin(_dec * scal) * np.sin(decstd_phot * scal) +
                            np.cos(_dec * scal) * np.cos(decstd_phot * scal) *
                            np.cos((_ra - rastd_phot) * scal)) * ((180 / np.pi) * 3600)
        if min(cc_sun) < 100:
            _type = 'sun'
        elif min(cc_phot) < 100:
            _type = 'stdp'
        elif min(cc_vega) < 100:
            _type = 'vega'
        else:
            _type = 'obj'
        if min(cc_phot) < 100:
            if _verbose:
                print img, 'phot', str(min(cc_phot)), str(std_phot[np.argmin(cc_phot)])
            ntt.util.updateheader(img, 0, {
                'stdname': [std_phot[np.argmin(cc_phot)], ''],
                'magstd': [float(magstd_phot[np.argmin(cc_phot)]), '']})
            # ntt.util.updateheader(img,0,{'magstd':[float(magstd_phot[argmin(cc_phot)]),'']})
        elif min(cc_sun) < 100:
            if _verbose:
                print img, 'sun', str(min(cc_sun)), str(std_sun[np.argmin(cc_sun)])
            ntt.util.updateheader(img, 0, {
                'stdname': [std_sun[np.argmin(cc_sun)], ''],
                'magstd': [float(magstd_sun[np.argmin(cc_sun)]), '']})
            # ntt.util.updateheader(img,0,{'magstd':[float(magstd_sun[argmin(cc_sun)]),'']})
        elif min(cc_vega) < 100:
            if _verbose:
                print img, 'vega', str(min(cc_vega)), str(std_vega[np.argmin(cc_vega)])
            ntt.util.updateheader(img, 0, {
                'stdname': [std_vega[np.argmin(cc_vega)], ''],
                'magstd': [float(magstd_vega[np.argmin(cc_vega)]), '']})
            # ntt.util.updateheader(img,0,{'magstd':[float(magstd_vega[argmin(cc_vega)]),'']})
        else:
            if _verbose:
                print img, 'object'
        _OBID = (ntt.util.readkey3(hdr, 'esoid'))
        if _type not in objectlist:
            objectlist[_type] = {}
        if _grism not in objectlist[_type]:
            objectlist[_type][_grism] = {}
        if _OBID not in objectlist[_type][_grism]:
            objectlist[_type][_grism][_OBID] = []
        objectlist[_type][_grism][_OBID].append(img)
    if 'stdp' not in objectlist:
        print '### warning: no photometric standard'
    else:
        print '### photometric standard in the list of object'
    if 'sun' not in objectlist:
        print '### warning: no telluric G standard (sun type)'
    else:
        print '### telluric G standard (sun type) in the list of object'
    if 'vega' not in objectlist:
        print '### warning: no telluric A standard (vega type)'
    else:
        print '### telluric A standard (vega type) in the list of object'
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.specred(_doprint=0)
    iraf.immatch(_doprint=0)
    iraf.imutil(_doprint=0)
    toforget = ['specred.apall', 'specred.transform']
    for t in toforget:
        iraf.unlearn(t)
    iraf.specred.apall.readnoi = _rdnoise
    iraf.specred.apall.gain = _gain
    iraf.specred.dispaxi = 2
    for _type in objectlist:
        for setup in objectlist[_type]:
            for _ID in objectlist[_type][setup]:
                listmerge = objectlist[_type][setup][_ID]
                listmerge = ntt.sortbyJD(listmerge)
                _object = ntt.util.readkey3(ntt.util.readhdr(listmerge[0]), 'object')
                if string.count(_object, '/') or string.count(_object, '.') or \
                        string.count(_object, ' '):
                    nameobj = string.split(_object, '/')[0]
                    nameobj = string.split(nameobj, ' ')[0]
                    nameobj = string.split(nameobj, '.')[0]
                else:
                    nameobj = _object
                _date = ntt.util.readkey3(ntt.util.readhdr(listmerge[0]), 'date-night')
                outputimage = nameobj + '_' + _date + '_' + setup + '_merge_' + str(MJDtoday)
                outputimage = ntt.util.name_duplicate(listmerge[0], outputimage, '')
                print '### setup= ', setup, ' name field= ', nameobj, \
                    ' merge image= ', outputimage, '\n'
                #################
                # added to avoid crashing with a single frame
                # header will not be updated with all info
                #################
                if len(listmerge) == 1:
                    ntt.util.delete(outputimage)
                    iraf.imutil.imcopy(listmerge[0], output=outputimage, verbose='no')
                    answ = 'n'
                else:
                    if os.path.isfile(outputimage) and _interactive:
                        answ = raw_input(
                            'combined frame of dithered spectra already created. '
                            'Do you want to make it again [[y]/n] ? ')
                        if not answ:
                            answ = 'y'
                    else:
                        answ = 'y'
                #################
                if answ in ['Yes', 'y', 'Y', 'yes']:
                    if _interactive:
                        automaticmerge = raw_input(
                            '\n### Do you want to try to find the dither between frames automatically [[y]/n]')
                        if not automaticmerge:
                            automaticmerge = 'yes'
                        elif automaticmerge.lower() in ['y', 'yes']:
                            automaticmerge = 'yes'
                        else:
                            automaticmerge = 'no'
                    else:
                        automaticmerge = 'yes'
                    if automaticmerge == 'yes':
                        offset = 0
                        offsetvec = []
                        _center0 = ntt.sofispec1Ddef.findaperture(listmerge[0], False)
                        _offset0 = ntt.util.readkey3(ntt.util.readhdr(listmerge[0]), 'xcum')
                        print '\n### Try to merge spectra considering their offset along the x axis .......'
                        f = open('_offset', 'w')
                        for img in listmerge:
                            _center = ntt.sofispec1Ddef.findaperture(img, False)
                            _center2 = (float(_center) +
                                        (float(_offset0) - float(_center0))) * (-1)
                            _offset = (-1) * ntt.util.readkey3(ntt.util.readhdr(img), 'xcum')
                            if abs(_center2 - _offset) >= 20:
                                automaticmerge = 'no'
                                break
                            else:
                                offset3 = _center2
                            offsetvec.append(offset3)
                            line = str(offset3) + ' 0\n'
                            f.write(line)
                        f.close()
                    if automaticmerge == 'yes':
                        print '### automatic merge .......... done'
                    else:
                        print '\n### warning: try identification of spectra position in interactive way '
                        offset = 0
                        offsetvec = []
                        _z1, _z2, goon = ntt.util.display_image(listmerge[0], 1, '', '', False)
                        print '\n### find aperture on first frame and use it as reference position of ' \
                              'the spectra (mark with ' + '"' + 'm' + '"' + ')'
                        _center0 = ntt.sofispec1Ddef.findaperture(listmerge[0], True)
                        _offset0 = ntt.util.readkey3(ntt.util.readhdr(listmerge[0]), 'xcum')
                        print '\n### find the aperture on all the spectra frames (mark with ' + '"' + 'm' + '"' + ')'
                        f = open('_offset', 'w')
                        for img in listmerge:
                            print '\n### ', img
                            _z1, _z2, goon = ntt.util.display_image(img, 1, '', '', False)
                            _center = ntt.sofispec1Ddef.findaperture(img, True)
                            _center2 = (float(_center) +
                                        (float(_offset0) - float(_center0))) * (-1)
                            _offset = (-1) * ntt.util.readkey3(ntt.util.readhdr(img), 'xcum')
                            print '\n### position from dither header: ' + str(_offset)
                            print '### position identified interactively: ' + str(_center2)
                            offset3 = raw_input(
                                '\n### which is the right position [' + str(_center2) + '] ?')
                            if not offset3:
                                offset3 = _center2
                            offsetvec.append(offset3)
                            line = str(offset3) + ' 0\n'
                            f.write(line)
                        f.close()
                    print offsetvec
                    start = int(max(offsetvec) - min(offsetvec))
                    print start
                    f = open('_goodlist', 'w')
                    print listmerge
                    for img in listmerge:
                        f.write(img + '\n')
                    f.close()
                    ntt.util.delete(outputimage)
                    ntt.util.delete('_output.fits')
                    yy1 = pyfits.open(listmerge[0])[0].data[:, 10]
                    iraf.immatch.imcombine('@_goodlist', '_output', combine='sum',
                                           reject='none', offset='_offset',
                                           masktyp='', rdnoise=_rdnoise,
                                           gain=_gain, zero='mode', Stdout=1)
                    _head = pyfits.open('_output.fits')[0].header
                    if _head['NAXIS1'] < 1024:
                        stop = str(_head['NAXIS1'])
                    else:
                        stop = '1024'
                    iraf.imutil.imcopy('_output[' + str(start) + ':' + stop + ',*]',
                                       output=outputimage, verbose='no')
                    print outputimage
                    print len(listmerge)
                    hdr1 = ntt.util.readhdr(outputimage)
                    ntt.util.updateheader(outputimage, 0, {
                        'SINGLEXP': [False, 'TRUE if resulting from single exposure'],
                        'M_EPOCH': [False, 'TRUE if resulting from multiple epochs'],
                        'EXPTIME': [ntt.util.readkey3(hdr1, 'EXPTIME') * len(listmerge),
                                    'Total integration time per pixel (s)'],
                        'TEXPTIME': [float(ntt.util.readkey3(hdr1, 'TEXPTIME')) * len(listmerge),
                                     'Total integration time of all exposures (s)'],
                        'APERTURE': [2.778e-4 * float(re.sub('long_slit_', '',
                                                             ntt.util.readkey3(hdr1, 'slit'))),
                                     '[deg] Aperture diameter'],
                        'NOFFSETS': [2, 'Number of offset positions'],
                        'NUSTEP': [0, 'Number of microstep positions'],
                        'NJITTER': [int(ntt.util.readkey3(hdr1, 'NCOMBINE') / 2),
                                    'Number of jitter positions']})
                    hdr = ntt.util.readhdr(outputimage)
                    matching = [s for s in hdr.keys() if "IMCMB" in s]
                    for imcmb in matching:
                        aaa = iraf.hedit(outputimage, imcmb, delete='yes',
                                         update='yes', verify='no', Stdout=1)
                    if 'SKYSUB' in hdr.keys():
                        aaa = iraf.hedit(outputimage, 'SKYSUB', delete='yes',
                                         update='yes', verify='no', Stdout=1)
                    mjdend = []
                    mjdstart = []
                    num = 0
                    for img in listmerge:
                        num = num + 1
                        hdrm = ntt.util.readhdr(img)
                        ntt.util.updateheader(outputimage, 0, {
                            'PROV' + str(num): [ntt.util.readkey3(hdrm, 'ARCFILE'),
                                                'Originating file'],
                            'TRACE' + str(num): [img, 'Originating file']})
                        mjdend.append(ntt.util.readkey3(hdrm, 'MJD-END'))
                        mjdstart.append(ntt.util.readkey3(hdrm, 'MJD-OBS'))
                    _dateobs = ntt.util.readkey3(ntt.util.readhdr(
                        listmerge[np.argmin(mjdstart)]), 'DATE-OBS')
                    _telapse = (max(mjdend) - min(mjdstart)) * 60. * 60 * 24.  # *86400
                    _tmid = (max(mjdend) + min(mjdstart)) / 2
                    _title = str(_tmid)[0:9] + ' ' + str(ntt.util.readkey3(hdr, 'object')) + \
                        ' ' + str(ntt.util.readkey3(hdr, 'grism')) + ' ' + \
                        str(ntt.util.readkey3(hdr, 'filter')) + ' ' + \
                        str(ntt.util.readkey3(hdr, 'slit'))
                    ntt.util.updateheader(outputimage, 0, {
                        'MJD-OBS': [min(mjdstart), 'MJD start'],
                        'MJD-END': [max(mjdend), 'MJD end'],
                        'TELAPSE': [_telapse, 'Total elapsed time [days]'],
                        'TMID': [_tmid, '[d] MJD mid exposure'],
                        'TITLE': [_title, 'Dataset title'],
                        'DATE-OBS': [_dateobs, 'Date of observation']})
                    # missing: merge airmass
                else:
                    print '\n### skip making again combined spectrum'
                objectlist[_type][setup][_ID] = [outputimage]
                print '\n### setup= ', setup, ' name field= ', nameobj, \
                    ' merge image= ', outputimage, '\n'
                if outputimage not in outputfile:
                    outputfile.append(outputimage)
                ntt.util.updateheader(outputimage, 0, {
                    'FILETYPE': [42116, 'combine 2D spectra frame']})
    if _verbose:
        if 'obj' in objectlist:
            print objectlist['obj']
        if 'stdp' in objectlist:
            print objectlist['stdp']
        if 'sun' in objectlist:
            print objectlist['sun']
        if 'vega' in objectlist:
            print objectlist['vega']
    if 'obj' not in objectlist.keys():
        sys.exit('\n### error: no objects in the list')
    sens = {}
    print '\n############################################\n### extract the spectra '
    # print objectlist
    for setup in objectlist['obj']:
        reduced = []
        for _ID in objectlist['obj'][setup]:
            for img in objectlist['obj'][setup][_ID]:
                hdr = ntt.util.readhdr(img)
                print '\n### next object\n ', img, ntt.util.readkey3(hdr, 'object')
                _grism = ntt.util.readkey3(hdr, 'grism')
                _exptimeimg = ntt.util.readkey3(hdr, 'exptime')
                _JDimg = ntt.util.readkey3(hdr, 'JD')
                imgex = ntt.util.extractspectrum(img, dv, _ext_trace,
                                                 _dispersionline, _interactive,
                                                 'obj', automaticex=_automaticex)
                if imgex not in outputfile:
                    outputfile.append(imgex)
                ntt.util.updateheader(imgex, 0, {
                    'FILETYPE': [42107, 'extracted 1D wave calib'],
                    'PRODCATG': ['SCIENCE.' + ntt.util.readkey3(hdr, 'tech').upper(),
                                 'Data product category']})
                hdr = ntt.util.readhdr(imgex)
                matching = [s for s in hdr.keys() if "TRACE" in s]
                for imcmb in matching:
                    aaa = iraf.hedit(imgex, imcmb, delete='yes', update='yes',
                                     verify='no', Stdout=1)
                ntt.util.updateheader(imgex, 0, {'TRACE1': [img, 'Originating file']})
                if os.path.isfile('database/ap' + re.sub('_ex.fits', '', imgex)):
                    if 'database/ap' + re.sub('_ex.fits', '', imgex) not in outputfile:
                        outputfile.append('database/ap' + re.sub('_ex.fits', '', imgex))
                ########################### telluric standard #############
                if 'sun' in objectlist and setup in objectlist['sun']:
                    _type = 'sun'
                elif 'vega' in objectlist and setup in objectlist['vega']:
                    _type = 'vega'
                else:
                    _type = 'none'
                if _type in ['sun', 'vega']:
                    stdref = ntt.__path__[0] + '/standard/fits/' + str(_type) + '.fits'
                    stdvec, airmassvec, JDvec = [], [], []
                    for _ID in objectlist[_type][setup]:
                        for std in objectlist[_type][setup][_ID]:
                            _airmassstd = ntt.util.readkey3(ntt.util.readhdr(std), 'airmass')
                            _JDstd = ntt.util.readkey3(ntt.util.readhdr(std), 'JD')
                            JDvec.append(abs(_JDstd - _JDimg))
                            stdvec.append(std)
                            airmassvec.append(_airmassstd)
                    stdtelluric = stdvec[np.argmin(JDvec)]
                    _exptimestd = ntt.util.readkey3(ntt.util.readhdr(stdtelluric), 'exptime')
                    _magstd = ntt.util.readkey3(ntt.util.readhdr(stdtelluric), 'magstd')
                    print '\n\n ##### closest standard for telluric corrections #### \n\n'
                    print stdtelluric, airmassvec[np.argmin(JDvec)]
                    stdtelluric_ex = ntt.util.extractspectrum(stdtelluric, dv, False,
                                                              False, _interactive, 'std',
                                                              automaticex=_automaticex)
                    if stdtelluric_ex not in outputfile:
                        outputfile.append(stdtelluric_ex)
                    ntt.util.updateheader(stdtelluric_ex, 0, {
                        'FILETYPE': [42107, 'extracted 1D wave calib ']})
                    ntt.util.updateheader(stdtelluric_ex, 0, {
                        'PRODCATG': ['SCIENCE.' + ntt.util.readkey3(
                            ntt.util.readhdr(stdtelluric_ex), 'tech').upper(),
                            'Data product category']})
                    hdr = ntt.util.readhdr(stdtelluric_ex)
                    matching = [s for s in hdr.keys() if "TRACE" in s]
                    for imcmb in matching:
                        aaa = iraf.hedit(stdtelluric_ex, imcmb, delete='yes',
                                         update='yes', verify='no', Stdout=1)
                    ntt.util.updateheader(stdtelluric_ex, 0, {
                        'TRACE1': [stdtelluric, 'Originating file']})
                    ###########################################################
                    # SN telluric calibration
                    imgf = re.sub('_ex.fits', '_f.fits', imgex)
                    imgf, senstelluric = ntt.sofispec1Ddef.calibrationsofi(
                        imgex, stdtelluric_ex, stdref, imgf, _interactive)
                    if imgf not in outputfile:
                        outputfile.append(imgf)
                    if senstelluric not in outputfile:
                        outputfile.append(senstelluric)
                    ntt.util.updateheader(imgf, 0, {
                        'FILETYPE': [42208, '1D wave calib, tell cor.'],
                        # 'SNR': [ntt.util.StoN(imgf, 50),
                        'SNR': [ntt.util.StoN2(imgf, False),
                                'Average signal to noise ratio per pixel'],
                        'TRACE1': [imgex, 'Originating file'],
                        'ASSON1': [re.sub('_f.fits', '_2df.fits', imgf),
                                   'Name of associated file'],
                        'ASSOC1': ['ANCILLARY.2DSPECTRUM', 'Category of associated file']})
                    ###########################################################
                    imgd = ntt.efoscspec1Ddef.fluxcalib2d(
                        img, senstelluric)  # flux calibration 2d images
                    ntt.util.updateheader(imgd, 0, {
                        'FILETYPE': [42209, '2D wavelength and flux calibrated spectrum']})
                    iraf.hedit(imgd, 'PRODCATG', delete='yes', update='yes', verify='no')
                    hdrd = ntt.util.readhdr(imgd)
                    matching = [s for s in hdrd.keys() if "TRACE" in s]
                    for imcmb in matching:
                        aaa = iraf.hedit(imgd, imcmb, delete='yes', update='yes',
                                         verify='no', Stdout=1)
                    ntt.util.updateheader(imgd, 0, {'TRACE1': [img, 'Originating file']})
                    if imgd not in outputfile:
                        outputfile.append(imgd)
                    ###############################################################
                    if 'stdp' in objectlist and setup in objectlist['stdp']:
                        print '\n ##### photometric calibration ######\n '
                        standardfile = []
                        for _ID in objectlist['stdp'][setup]:
                            for stdp in objectlist['stdp'][setup][_ID]:
                                stdp_ex = ntt.util.extractspectrum(
                                    stdp, dv, False, _dispersionline, _interactive,
                                    'std', automaticex=_automaticex)
                                standardfile.append(stdp_ex)
                                if stdp_ex not in outputfile:
                                    outputfile.append(stdp_ex)
                                ntt.util.updateheader(stdp_ex, 0, {
                                    'FILETYPE': [42107, 'extracted 1D wave calib'],
                                    'TRACE1': [stdp_ex, 'Originating file'],
                                    'PRODCATG': ['SCIENCE.' + ntt.util.readkey3(
                                        ntt.util.readhdr(stdp_ex), 'tech').upper(),
                                        'Data product category']})
                        print '\n### ', standardfile, ' \n'
                        if len(standardfile) >= 2:
                            standardfile0 = raw_input(
                                'which one do you want to use [' + str(standardfile[0]) + '] ? ')
                            if not standardfile0:
                                standardfile0 = standardfile[0]
                        else:
                            standardfile0 = standardfile[0]
                        print standardfile0
                        stdpf = re.sub('_ex.fits', '_f.fits', standardfile0)
                        stdpf, senstelluric2 = ntt.sofispec1Ddef.calibrationsofi(
                            standardfile0, stdtelluric_ex, stdref, stdpf, _interactive)
                        if stdpf not in outputfile:
                            outputfile.append(stdpf)
                        ntt.util.updateheader(stdpf, 0, {
                            'FILETYPE': [42208, '1D wave calib, tell cor'],
                            'TRACE1': [stdp, 'Originating file']})
                        stdname = ntt.util.readkey3(ntt.util.readhdr(standardfile0), 'stdname')
                        standardfile = ntt.__path__[0] + '/standard/flux/' + stdname
                        xx, yy = ntt.util.ReadAscii2(standardfile)
                        crval1 = pyfits.open(stdpf)[0].header.get('CRVAL1')
                        cd1 = pyfits.open(stdpf)[0].header.get('CD1_1')
                        datastdpf, hdrstdpf = pyfits.getdata(stdpf, 0, header=True)
                        xx1 = np.arange(len(datastdpf[0][0]))
                        aa1 = crval1 + (xx1) * cd1
                        yystd = np.interp(aa1, xx, yy)
                        rcut = np.compress(
                            ((aa1 < 13000) | (aa1 > 15150)) & ((11700 < aa1) | (aa1 < 11000)) &
                            (aa1 > 10000) & ((aa1 < 17800) | (aa1 > 19600)) & (aa1 < 24000),
                            datastdpf[0][0] / yystd)
                        aa11 = np.compress(
                            ((aa1 < 13000) | (aa1 > 15150)) & ((11700 < aa1) | (aa1 < 11000)) &
                            (aa1 > 10000) & ((aa1 < 17800) | (aa1 > 19600)) & (aa1 < 24000), aa1)
                        yy1clean = np.interp(aa1, aa11, rcut)
                        aa1 = np.array(aa1)
                        yy1clean = np.array(yy1clean)
                        A = np.ones((len(rcut), 2), dtype=float)
                        A[:, 0] = aa11
                        result = np.linalg.lstsq(A, rcut)  # result=[zero,slope]
                        p = [result[0][1], result[0][0]]
                        yfit = ntt.util.pval(aa1, p)
                        pl.clf()
                        pl.ion()
                        pl.plot(aa1, datastdpf[0][0] / yystd, color='red', label='std')
                        pl.plot(aa1, yfit, color='blue', label='fit')
                        pl.legend(numpoints=1, markerscale=1.5)
                        # sens function sofi spectra
                        outputsens = 'sens_' + stdpf
                        ntt.util.delete(outputsens)
                        datastdpf[0][0] = yfit
                        pyfits.writeto(outputsens, np.float32(datastdpf), hdrstdpf)
                        #################
                        imgsc = re.sub('_ex.fits', '_sc.fits', imgex)
                        ntt.util.delete(imgsc)
                        crval2 = pyfits.open(imgf)[0].header.get('CRVAL1')
                        cd2 = pyfits.open(imgf)[0].header.get('CD1_1')
                        dataf, hdrf = pyfits.getdata(imgf, 0, header=True)
                        xx2 = np.arange(len(dataf[0][0]))
                        aa2 = crval2 + (xx2) * cd2
                        yyscale = np.interp(aa2, aa1, yfit)
                        dataf[0][0] = dataf[0][0] / yyscale
                        dataf[1][0] = dataf[1][0] / yyscale
                        dataf[2][0] = dataf[2][0] / yyscale
                        dataf[3][0] = dataf[3][0] / yyscale
                        pyfits.writeto(imgsc, np.float32(dataf), hdrf)
                        ntt.util.updateheader(imgsc, 0, {
                            'SENSPHOT': [outputsens, 'sens used to flux cal'],
                            'FILETYPE': [42208, '1D wave,flux calib, tell cor'],
                            'TRACE1': [imgf, 'Originating file']})
                        # ntt.util.updateheader(imgsc,0,{'FILETYPE':[42208,'1D wave,flux calib, tell cor']})
                        # ntt.util.updateheader(imgsc,0,{'TRACE1':[imgf,'']})
                        print '\n### flux calibrated spectrum= ', imgf, \
                            ' with the standard= ', stdpf
                        if imgsc not in outputfile:
                            outputfile.append(imgsc)
                    else:
                        print '\n### photometric calibration not performed \n'
    print '\n### adding keywords for phase 3 ....... '
    reduceddata = ntt.util.rangedata(outputfile)
    f = open('logfile_spec1d_' + str(reduceddata) + '_' + str(datenow) +
             '.raw.list', 'w')
    for img in outputfile:
        if str(img)[-5:] == '.fits':
            hdr = ntt.util.readhdr(img)
            # added for DR2
            if 'NCOMBINE' in hdr:
                _ncomb = ntt.util.readkey3(hdr, 'NCOMBINE')
            else:
                _ncomb = 1.0
            _effron = 12. * (1 / np.sqrt(ntt.util.readkey3(hdr, 'ndit') * _ncomb)) * \
                np.sqrt(np.pi / 2)
            try:
                ntt.util.phase3header(img)  # phase 3 definitions
                ntt.util.updateheader(img, 0, {
                    'quality': ['Final', ''],
                    'EFFRON': [_effron, 'Effective readout noise per output (e-)']})
                f.write(ntt.util.readkey3(ntt.util.readhdr(img), 'arcfile') + '\n')
            except:
                print 'Warning: ' + img + ' is not a fits file'
    f.close()
    return outputfile, 'logfile_spec1d_' + str(reduceddata) + '_' + \
        str(datenow) + '.raw.list'
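# Usage sketch (illustrative only): reduce a set of dithered SOFI 2D spectra,
# merging each OB, extracting the spectra, and applying telluric and
# photometric calibration when the corresponding standards are present in the
# input list. 'sofi_spectra.list' is a hypothetical list of 2D
# wavelength-calibrated frames; the call assumes the function is reachable as
# ntt.sofispec1Ddef.sofispec1Dredu.
#
#   from ntt import sofispec1Ddef
#   files = open('sofi_spectra.list').readlines()
#   outputs, logfile = sofispec1Ddef.sofispec1Dredu(
#       files, _interactive=False, _ext_trace=False, _dispersionline=False,
#       _automaticex=True, _verbose=True)
#   print 'reduced products:', outputs
#   print 'log written to:', logfile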
def efoscfastredu(imglist, _listsens, _listarc, _ext_trace, _dispersionline, _cosmic, _interactive): # print "LOGX:: Entering `efoscfastredu` method/function in %(__file__)s" # % globals() import string import os import re import sys os.environ["PYRAF_BETA_STATUS"] = "1" try: from astropy.io import fits as pyfits except: import pyfits from ntt.util import readhdr, readkey3 import ntt import numpy as np dv = ntt.dvex() scal = np.pi / 180. if not _interactive: _interactive = False _inter = 'NO' else: _inter = 'YES' from pyraf import iraf iraf.noao(_doprint=0) iraf.imred(_doprint=0) iraf.ccdred(_doprint=0) iraf.twodspec(_doprint=0) iraf.longslit(_doprint=0) iraf.onedspec(_doprint=0) iraf.specred(_doprint=0) toforget = ['ccdproc', 'imcopy', 'specred.apall', 'longslit.identify', 'longslit.reidentify', 'specred.standard', 'longslit.fitcoords', 'onedspec.wspectext'] for t in toforget: iraf.unlearn(t) iraf.ccdred.verbose = 'no' # not print steps iraf.specred.verbose = 'no' # not print steps iraf.ccdproc.darkcor = 'no' iraf.ccdproc.fixpix = 'no' iraf.ccdproc.flatcor = 'no' iraf.ccdproc.zerocor = 'no' iraf.ccdproc.ccdtype = '' _gain = ntt.util.readkey3(ntt.util.readhdr(imglist[0]), 'gain') _ron = ntt.util.readkey3(ntt.util.readhdr(imglist[0]), 'ron') iraf.specred.apall.readnoi = _ron iraf.specred.apall.gain = _gain iraf.specred.dispaxi = 2 iraf.longslit.dispaxi = 2 iraf.longslit.mode = 'h' iraf.specred.mode = 'h' iraf.noao.mode = 'h' iraf.ccdred.instrument = "ccddb$kpno/camera.dat" iraf.set(direc=ntt.__path__[0] + '/') for img in imglist: hdr = ntt.util.readhdr(img) _tech = ntt.util.readkey3(hdr, 'tech') if _tech != 'SPECTRUM': sys.exit('error: ' + str(img) + ' is not a spectrum ') print '\n#### image name = ' + img + '\n' _grism0 = readkey3(hdr, 'grism') _filter0 = readkey3(hdr, 'filter') _slit0 = readkey3(hdr, 'slit') _object0 = readkey3(hdr, 'object') _date0 = readkey3(hdr, 'date-night') setup = (_grism0, _filter0, _slit0) _biassec0 = '[3:1010,1026:1029]' if _grism0 == 'Gr16': _trimsec0 = '[100:950,1:950]' elif _grism0 == 'Gr13': if _filter0 == 'Free': _trimsec0 = '[100:950,1:1015]' elif _filter0 == 'GG495': _trimsec0 = '[100:950,208:1015]' elif _filter0 == 'OG530': _trimsec0 = '[100:950,300:1015]' elif _grism0 == 'Gr11': _trimsec0 = '[100:950,5:1015]' else: _trimsec0 = '[100:950,5:1015]' _object0 = re.sub(' ', '', _object0) _object0 = re.sub('/', '_', _object0) nameout0 = 't' + str(_object0) + '_' + str(_date0) for _set in setup: nameout0 = nameout0 + '_' + _set nameout0 = ntt.util.name_duplicate(img, nameout0, '') timg = nameout0 if os.path.isfile(timg): os.system('rm -rf ' + timg) iraf.imcopy(img, output=timg) iraf.ccdproc(timg, output='', overscan='no', trim='yes', zerocor="no", flatcor="no", readaxi='column', trimsec=str(_trimsec0), biassec=_biassec0, Stdout=1) img = timg if _listarc: arcfile = ntt.util.searcharc(img, _listarc)[0] else: arcfile = '' if not arcfile: arcfile = ntt.util.searcharc(img, '')[0] else: iraf.ccdproc(arcfile, output='t' + arcfile, overscan='no', trim='yes', zerocor="no", flatcor="no", readaxi='column', trimsec=str(_trimsec0), biassec=str(_biassec0), Stdout=1) arcfile = 't' + arcfile if _cosmic: # print cosmic rays rejection ntt.cosmics.lacos(img, output='', gain=_gain, readn=_ron, xorder=9, yorder=9, sigclip=4.5, sigfrac=0.5, objlim=1, verbose=True, interactive=False) print '\n### cosmic rays rejections ........ 
        if _cosmic:
            # cosmic rays rejection
            ntt.cosmics.lacos(img, output='', gain=_gain, readn=_ron, xorder=9, yorder=9,
                              sigclip=4.5, sigfrac=0.5, objlim=1, verbose=True, interactive=False)
            print '\n### cosmic rays rejections ........ done '
        if not arcfile:
            print '\n### warning no arcfile \n exit '
        else:
            arcref = ntt.util.searcharc(img, '')[0]
            if arcfile[0] == '/':
                os.system('cp ' + arcfile + ' ' + string.split(arcfile, '/')[-1])
                arcfile = string.split(arcfile, '/')[-1]
            arcref = string.split(arcref, '/')[-1]
            if arcref:
                os.system('cp ' + arcref + ' .')
                arcref = string.split(arcref, '/')[-1]
                if not os.path.isdir('database/'):
                    os.mkdir('database/')
                if os.path.isfile(ntt.util.searcharc(img, '')[1] + '/database/id' + re.sub('.fits', '', arcref)):
                    os.system('cp ' + ntt.util.searcharc(img, '')[1] + '/database/id' +
                              re.sub('.fits', '', arcref) + ' database/')
                iraf.longslit.reidentify(referenc=arcref, images=arcfile, interac=_inter, section='column 10',
                                         coordli='direc$standard/ident/Lines_HgCdHeNeAr600.dat', overrid='yes',
                                         step=0, newaps='no', nsum=5, nlost=2, mode='h', verbose='no')
            else:
                iraf.longslit.identify(images=arcfile, section='column 10',
                                       coordli='direc$standard/ident/Lines_HgCdHeNeAr600.dat',
                                       nsum=10, fwidth=7, order=3, mode='h')
                iraf.longslit.reident(referenc=arcfile, images=arcfile, interac='NO', section='column 10',
                                      coordli='direc$standard/ident/Lines_HgCdHeNeAr600.dat', overrid='yes',
                                      step=10, newaps='yes', nsum=5, nlost=2, mode='h', verbose='no')
            qqq = iraf.longslit.fitcoords(images=re.sub('.fits', '', arcfile), fitname=re.sub('.fits', '', arcfile),
                                          interac='no', combine='yes', databas='database', function='legendre',
                                          yorder=4, logfile='logfile', plotfil='', mode='h')
            iraf.specred.transform(input=img, output=img, minput='', fitnames=re.sub('.fits', '', arcfile),
                                   databas='database', x1='INDEF', x2='INDEF', y1='INDEF', y2='INDEF',
                                   flux='yes', mode='h', logfile='logfile')
            # ######################  check wavelength calibration  ############
            _skyfile = ntt.__path__[0] + '/standard/ident/sky_' + setup[0] + '_' + setup[1] + '.fits'
            shift = ntt.efoscspec2Ddef.skyfrom2d(img, _skyfile)
            print '\n### wavelength calibration check done ...... spectrum shifted by ' + str(shift) + ' Angstrom \n'
            zro = pyfits.open(img)[0].header.get('CRVAL2')
            ntt.util.updateheader(img, 0, {'CRVAL2': [zro + int(shift), '']})
            std, rastd, decstd, magstd = ntt.util.readstandard('standard_efosc_mab.txt')
            hdrt = readhdr(img)
            _ra = readkey3(hdrt, 'RA')
            _dec = readkey3(hdrt, 'DEC')
            _object = readkey3(hdrt, 'object')
            # angular distance (arcsec) to each catalogued standard, via the spherical law of cosines
            dd = np.arccos(np.sin(_dec * scal) * np.sin(decstd * scal) +
                           np.cos(_dec * scal) * np.cos(decstd * scal) *
                           np.cos((_ra - rastd) * scal)) * ((180 / np.pi) * 3600)
            if min(dd) < 100:
                _type = 'stdsens'
                ntt.util.updateheader(img, 0, {'stdname': [std[np.argmin(dd)], '']})
                ntt.util.updateheader(img, 0, {'magstd': [float(magstd[np.argmin(dd)]), '']})
            else:
                _type = 'obj'
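            # Added note: targets within 100 arcsec of an entry in standard_efosc_mab.txt
            # are flagged as spectrophotometric standards ('stdsens') and used below to
            # derive a sensitivity function; anything else is extracted and, if a
            # sensitivity file is available, flux-calibrated as a science object.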
            print '\n### EXTRACTION USING IRAF TASK APALL \n'
            result = []
            if _type == 'obj':
                imgex = ntt.util.extractspectrum(img, dv, _ext_trace, _dispersionline, _interactive, _type)
                ntt.util.updateheader(imgex, 0, {'FILETYPE': [22107, 'extracted 1D spectrum ']})
                ntt.util.updateheader(imgex, 0, {'PRODCATG': ['SCIENCE.' + readkey3(readhdr(imgex), 'tech').upper(),
                                                              'Data product category']})
                ntt.util.updateheader(imgex, 0, {'TRACE1': [img, '']})
                result.append(imgex)
                if _listsens:
                    sensfile = ntt.util.searchsens(img, _listsens)[0]
                else:
                    sensfile = ''
                if not sensfile:
                    sensfile = ntt.util.searchsens(img, '')[0]
                if sensfile:
                    imgf = re.sub('.fits', '_f.fits', img)
                    _extinctdir = 'direc$standard/extinction/'
                    _extinction = 'extinction_lasilla.dat'
                    _observatory = 'lasilla'
                    _exptime = readkey3(hdrt, 'exptime')
                    _airmass = readkey3(hdrt, 'airmass')
                    ntt.util.delete(imgf)
                    iraf.specred.calibrate(input=imgex, output=imgf, sensiti=sensfile, extinct='yes', flux='yes',
                                           ignorea='yes', extinction=_extinctdir + _extinction,
                                           observatory=_observatory, airmass=_airmass, exptime=_exptime, fnu='no')
                    hedvec = {'SENSFUN': [string.split(sensfile, '/')[-1], 'sensitivity function'],
                              'FILETYPE': [22208, '1D wavelength and flux calibrated spectrum '],
                              'SNR': [ntt.util.StoN2(imgf, False), 'Average S/N ratio'],
                              'BUNIT': ['erg/cm2/s/Angstrom', 'Flux Calibration Units'],
                              'TRACE1': [imgex, '']}
                    ntt.util.updateheader(imgf, 0, hedvec)
                    imgout = imgf
                    imgd = ntt.efoscspec1Ddef.fluxcalib2d(img, sensfile)
                    ntt.util.updateheader(imgd, 0, {'FILETYPE': [22209, '2D wavelength and flux calibrated spectrum ']})
                    ntt.util.updateheader(imgd, 0, {'TRACE1': [img, '']})
                    imgasci = re.sub('.fits', '.asci', imgout)
                    ntt.util.delete(imgasci)
                    iraf.onedspec.wspectext(imgout + '[*,1,1]', imgasci, header='no')
                    result = result + [imgout, imgd, imgasci]
            else:
                imgex = ntt.util.extractspectrum(img, dv, _ext_trace, _dispersionline, _interactive, 'std')
                imgout = ntt.efoscspec1Ddef.sensfunction(imgex, 'spline3', 6, _inter)
                result = result + [imgout]
            for img in result:
                if img[-5:] == '.fits':
                    ntt.util.phase3header(img)  # phase 3 definitions
                    ntt.util.airmass(img)  # phase 3 definitions
                    ntt.util.updateheader(img, 0, {'quality': ['Rapid', 'Final or Rapid reduction']})
    return result
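

# Example call (illustrative only; the file name and list arguments below are
# placeholders, not values taken from the original pipeline):
#
#   outputs = efoscfastredu(['tobject_20130101_Gr13_Free_slit1.0.fits'],
#                           _listsens='', _listarc='', _ext_trace=False,
#                           _dispersionline=False, _cosmic=True, _interactive=False)
#
# With _cosmic=True the trimmed 2D frame is cleaned with ntt.cosmics.lacos before
# wavelength calibration; the returned list collects the extracted and calibrated
# spectra produced for each input image.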