Example #1
def run_darkcombine(images, output):
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    iraf.darkcombine(input=images, output=output, combine="average", reject="minmax", \
         ccdtype="none", process="Yes", delete="No", clobber="No", scale="exposure", \
         statsec='', nlow=0, nhigh=1, nkeep=1, mclip="Yes", lsigma=3.0, hsigma=3.0, \
         rdnoise=0., gain=1., snoise=0., pclip=-0.5, blank=0.0, mode="al")
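
A minimal usage sketch for the function above. The file names are hypothetical; it assumes the dark frames sit in the current directory and that run_darkcombine is imported from the module that defines it:

# hypothetical call: combine every frame matching the template into Dark.fits
# (an IRAF @-list such as '@dark.lst' would also work for the images argument)
run_darkcombine(images='dark*.fits', output='Dark')
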
Example #2
def combinebias(lstfn):
    """
    call iraf command zerocombine, combine bias fits.
    lstfn : lst file name
    type : string
    output file : Zero.fits
    """
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    iraf.zerocombine(input='o//@' + lstfn,
                     output='Zero',
                     combine='average',
                     reject='minmax',
                     ccdtype='',
                     process=False,
                     delete=False,
                     clobber=False,
                     scale='none',
                     statsec='',
                     nlow=0,
                     nhigh=1,
                     nkeep=1,
                     mclip=True,
                     lsigma=3.0,
                     hsigma=3.0,
                     rdnoise='rdnoise',
                     gain='gain',
                     snoise=0.0,
                     pclip=-0.5,
                     blank=0.0)
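
A usage sketch for combinebias, with hypothetical file names. The 'o//@' template tells zerocombine to read the list file and prepend 'o' to every entry, so it is the overscan-corrected copies (oYF*.fits) that get combined:

# write a hypothetical list of raw bias frame names, one per line
with open('bias.lst', 'w') as listfile:
    listfile.write('YF001.fits\nYF002.fits\nYF003.fits\n')

combinebias('bias.lst')   # combines oYF001.fits ... oYF003.fits into Zero.fits
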
Example #3
def subzero(imagesre, zero='Zero'):
    '''Run ccdproc to subtract the zero (bias) level from the images'''

    imageslist = glob.glob(imagesre)
    imagesin = ', '.join(imageslist)

    # Load packages
    iraf.imred()
    iraf.ccdred()
    # Unlearn previous settings
    iraf.ccdred.ccdproc.unlearn()
    iraf.ccdred.combine.unlearn()
    # setup and run task
    iraf.ccdred.ccdproc.ccdtype = ''
    iraf.ccdred.ccdproc.noproc = False
    iraf.ccdred.ccdproc.fixpix = False
    iraf.ccdred.ccdproc.overscan = False
    iraf.ccdred.ccdproc.darkcor = False
    iraf.ccdred.ccdproc.illumcor = False
    iraf.ccdred.ccdproc.fringecor = False
    iraf.ccdred.ccdproc.readcor = False
    iraf.ccdred.ccdproc.scancor = False
    iraf.ccdred.ccdproc.trim = False
    iraf.ccdred.ccdproc.trimsec = ''
    iraf.ccdred.ccdproc.readaxis = 'line'
    iraf.ccdred.ccdproc.zerocor = True
    iraf.ccdred.ccdproc.zero = zero
    iraf.ccdred.ccdproc.flatcor = False
    iraf.ccdred.ccdproc.flat = ''
    iraf.ccdred.ccdproc(images=imagesin)
Example #4
def masterbias(biasre, output='Zero', combine='median', reject='minmax',
        ccdtype='', rdnoise='rdnoise', gain='gain'):
    '''run the task ccdred.zerocombine with chosen parameters

    Input:
    -------
     str biasre: glob pattern (shell-style wildcard) matching the zero level images

    Output:
    -------
     file Zero.fits: combined zerolevel images
    '''

    biaslist = glob.glob(biasre)
    biasstring = ', '.join(biaslist)

    # load packages
    iraf.imred()
    iraf.ccdred()
    # unlearn settings
    iraf.imred.unlearn()
    iraf.ccdred.unlearn()
    iraf.ccdred.ccdproc.unlearn()
    iraf.ccdred.combine.unlearn()
    iraf.ccdred.zerocombine.unlearn()
    iraf.ccdred.setinstrument.unlearn()
    # setup task
    iraf.ccdred.zerocombine.output = output
    iraf.ccdred.zerocombine.combine = combine
    iraf.ccdred.zerocombine.reject = reject
    iraf.ccdred.zerocombine.ccdtype = ccdtype
    iraf.ccdred.zerocombine.rdnoise = rdnoise
    iraf.ccdred.zerocombine.gain = gain
    # run task
    iraf.ccdred.zerocombine(input=biasstring)
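
A usage sketch for masterbias; the filename pattern is hypothetical and the combined zero level frame goes to Zero.fits by default:

# combine every bias frame matching the pattern into Zero.fits;
# keyword arguments such as combine='average' override the defaults
masterbias('bias*.fits')
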
Example #5
def divflat(imagesre, flat='Flat'):
    '''Run the ccdproc task to flat-field the images'''

    imageslist = glob.glob(imagesre)
    imagesin = ', '.join(imageslist)

    # Load packages
    iraf.imred()
    iraf.ccdred()
    # Unlearn settings
    iraf.ccdred.ccdproc.unlearn()
    iraf.ccdred.combine.unlearn()
    # Setup and run task
    iraf.ccdred.ccdproc.ccdtype = ''
    iraf.ccdred.ccdproc.noproc = False
    iraf.ccdred.ccdproc.fixpix = False
    iraf.ccdred.ccdproc.overscan = False
    iraf.ccdred.ccdproc.darkcor = False
    iraf.ccdred.ccdproc.illumcor = False
    iraf.ccdred.ccdproc.fringecor = False
    iraf.ccdred.ccdproc.readcor = False
    iraf.ccdred.ccdproc.scancor = False
    iraf.ccdred.ccdproc.trim = False
    iraf.ccdred.ccdproc.trimsec = ''
    iraf.ccdred.ccdproc.readaxis = 'line'
    iraf.ccdred.ccdproc.zerocor = False
    iraf.ccdred.ccdproc.zero = ''
    iraf.ccdred.ccdproc.flatcor = True
    iraf.ccdred.ccdproc.flat = flat
    iraf.ccdred.ccdproc(images=imagesin)
Example #6
def correctimages(imagesre, zero='Zero', flat='nFlat'):
    '''Run ccdproc task to correct images'''

    imageslist = glob.glob(imagesre)
    imagesin = ', '.join(imageslist)

    trimsection = str(raw_input('Enter trim section (or Hit <Enter>): '))
    trimquery = True
    if trimsection == '':
        trimquery = False

    # Load Packages
    iraf.imred()
    iraf.ccdred()
    # Unlearn Settings
    iraf.ccdred.ccdproc.unlearn()
    iraf.ccdred.combine.unlearn()
    # Setup and run task
    iraf.ccdred.ccdproc.ccdtype = ''
    iraf.ccdred.ccdproc.noproc = False
    iraf.ccdred.ccdproc.fixpix = False
    iraf.ccdred.ccdproc.overscan = False
    iraf.ccdred.ccdproc.darkcor = False
    iraf.ccdred.ccdproc.illumcor = False
    iraf.ccdred.ccdproc.fringecor = False
    iraf.ccdred.ccdproc.readcor = False
    iraf.ccdred.ccdproc.scancor = False
    iraf.ccdred.ccdproc.trim = trimquery
    iraf.ccdred.ccdproc.trimsec = trimsection
    iraf.ccdred.ccdproc.readaxis = 'line'
    iraf.ccdred.ccdproc.zerocor = True
    iraf.ccdred.ccdproc.zero = zero
    iraf.ccdred.ccdproc.flatcor = True
    iraf.ccdred.ccdproc.flat = flat
    iraf.ccdred.ccdproc(images=imagesin)
Example #7
def coroverbiastrim(lstfile):
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    x1,x2,y1,y2 = get_trim_sec()
    iraf.ccdproc(images = '@' + lstfile + '//[1]'
        , output = '%bo%bo%@' + lstfile
        , ccdtype = '', max_cache = 0, noproc = False
        , fixpix = False, overscan = True, trim = False
        , zerocor = True, darkcor = False, flatcor = False
        , illumcor = False, fringecor = False, readcor = False
        , scancor = False, readaxis = 'line', fixfile = ''
        , biassec = '[5:45,%s:%s]'%(y1,y2), trimsec = '[%s:%s,%s:%s]'%(x1,x2,y1,y2)
        , zero = 'Zero', dark = '', flat = '', illum = '', fringe = ''
        , minreplace = 1.0, scantype = 'shortscan', nscan = 1
        , interactive = False, function = 'chebyshev', order = 1
        , sample = '*', naverage = 1, niterate = 1
        , low_reject = 3.0, high_reject = 3.0, grow = 1.0)
    iraf.ccdproc(images = '%bo%bo%@' + lstfile
        , output = '%tbo%tbo%@' + lstfile
        , ccdtype = '', max_cache = 0, noproc = False
        , fixpix = False, overscan = False, trim = True
        , zerocor = False, darkcor = False, flatcor = False
        , illumcor = False, fringecor = False, readcor = False
        , scancor = False, readaxis = 'line', fixfile = ''
        , biassec = '[5:45,%s:%s]'%(y1,y2), trimsec = '[%s:%s,%s:%s]'%(x1,x2,y1,y2)
        , zero = 'Zero', dark = '', flat = '', illum = '', fringe = ''
        , minreplace = 1.0, scantype = 'shortscan', nscan = 1
        , interactive = False, function = 'chebyshev', order = 1
        , sample = '*', naverage = 1, niterate = 1
        , low_reject = 3.0, high_reject = 3.0, grow = 1.0)
    iraf.flpr()
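
coroverbiastrim relies on a get_trim_sec() helper that is not shown in this example. A minimal hypothetical stand-in is sketched below; the pixel limits are purely illustrative and would normally come from the detector layout or a small configuration file:

def get_trim_sec():
    # hypothetical fixed section: columns x1:x2 and lines y1:y2 are kept after trimming
    x1, x2 = 51, 2048
    y1, y2 = 1, 4612
    return x1, x2, y1, y2
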
Example #8
def divflat(imagesre, flat='Flat'):
    '''Run the ccdproc task to flat-field the images'''

    imageslist = glob.glob(imagesre)
    imagesin = ', '.join(imageslist)

    # Load packages
    iraf.imred()
    iraf.ccdred()
    # Unlearn settings
    iraf.ccdred.ccdproc.unlearn()
    iraf.ccdred.combine.unlearn()
    # Setup and run task
    iraf.ccdred.ccdproc.ccdtype = ''
    iraf.ccdred.ccdproc.noproc = False
    iraf.ccdred.ccdproc.fixpix = False
    iraf.ccdred.ccdproc.overscan = False
    iraf.ccdred.ccdproc.darkcor = False
    iraf.ccdred.ccdproc.illumcor = False
    iraf.ccdred.ccdproc.fringecor = False
    iraf.ccdred.ccdproc.readcor = False
    iraf.ccdred.ccdproc.scancor = False
    iraf.ccdred.ccdproc.trim = False
    iraf.ccdred.ccdproc.trimsec = ''
    iraf.ccdred.ccdproc.readaxis = 'line'
    iraf.ccdred.ccdproc.zerocor = False
    iraf.ccdred.ccdproc.zero = ''
    iraf.ccdred.ccdproc.flatcor = True
    iraf.ccdred.ccdproc.flat = flat
    iraf.ccdred.ccdproc(images=imagesin)
Example #9
def correctimages(imagesre, zero='Zero', flat='nFlat'):
    '''Run ccdproc task to correct images'''

    imageslist = glob.glob(imagesre)
    imagesin = ', '.join(imageslist)

    trimsection = str(raw_input('Enter trim section (or Hit <Enter>): '))
    trimquery = True
    if trimsection == '':
        trimquery = False

    # Load Packages
    iraf.imred()
    iraf.ccdred()
    # Unlearn Settings
    iraf.ccdred.ccdproc.unlearn()
    iraf.ccdred.combine.unlearn()
    # Setup and run task
    iraf.ccdred.ccdproc.ccdtype = ''
    iraf.ccdred.ccdproc.noproc = False
    iraf.ccdred.ccdproc.fixpix = False
    iraf.ccdred.ccdproc.overscan = False
    iraf.ccdred.ccdproc.darkcor = False
    iraf.ccdred.ccdproc.illumcor = False
    iraf.ccdred.ccdproc.fringecor = False
    iraf.ccdred.ccdproc.readcor = False
    iraf.ccdred.ccdproc.scancor = False
    iraf.ccdred.ccdproc.trim = trimquery
    iraf.ccdred.ccdproc.trimsec = trimsection
    iraf.ccdred.ccdproc.readaxis = 'line'
    iraf.ccdred.ccdproc.zerocor = True
    iraf.ccdred.ccdproc.zero = zero
    iraf.ccdred.ccdproc.flatcor = True
    iraf.ccdred.ccdproc.flat = flat
    iraf.ccdred.ccdproc(images=imagesin)
Example #10
def combine_flat(lstfile):
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    iraf.flatcombine(input='tbo//@' + lstfile,
                     output='Halogen',
                     combine='average',
                     reject='crreject',
                     ccdtype='',
                     process=False,
                     subsets=False,
                     delete=False,
                     clobber=False,
                     scale='mode',
                     statsec='',
                     nlow=1,
                     nhigh=1,
                     nkeep=1,
                     mclip=True,
                     lsigma=3.0,
                     hsigma=3.0,
                     rdnoise='rdnoise',
                     gain='gain',
                     snoise=0.0,
                     pclip=-0.5,
                     blank=1.0)
Example #11
def subzero(imagesre, zero='Zero'):
    '''Run ccdproc to subtract the zero (bias) level from the images'''

    imageslist = glob.glob(imagesre)
    imagesin = ', '.join(imageslist)

    # Load packages
    iraf.imred()
    iraf.ccdred()
    # Unlearn previous settings
    iraf.ccdred.ccdproc.unlearn()
    iraf.ccdred.combine.unlearn()
    # setup and run task
    iraf.ccdred.ccdproc.ccdtype = ''
    iraf.ccdred.ccdproc.noproc = False
    iraf.ccdred.ccdproc.fixpix = False
    iraf.ccdred.ccdproc.overscan = False
    iraf.ccdred.ccdproc.darkcor = False
    iraf.ccdred.ccdproc.illumcor = False
    iraf.ccdred.ccdproc.fringecor = False
    iraf.ccdred.ccdproc.readcor = False
    iraf.ccdred.ccdproc.scancor = False
    iraf.ccdred.ccdproc.trim = False
    iraf.ccdred.ccdproc.trimsec = ''
    iraf.ccdred.ccdproc.readaxis = 'line'
    iraf.ccdred.ccdproc.zerocor = True
    iraf.ccdred.ccdproc.zero = zero
    iraf.ccdred.ccdproc.flatcor = False
    iraf.ccdred.ccdproc.flat = ''
    iraf.ccdred.ccdproc(images=imagesin)
Example #12
def combine_lamp(lstfile):
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    iraf.imcombine(input='%ftbo%ftbo%@' + lstfile,
                   output=lstfile.replace('.lst', ''),
                   combine='sum',
                   reject='none')
Example #13
def combinelamp(lst):
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    iraf.imcombine(input='%ftbo%ftbo%@' + lst,
                   output='Lamp',
                   combine='sum',
                   reject='none')
Example #14
def initialize_iraf():
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    iraf.specred(_doprint=0)
    iraf.twodspec(_doprint=0)
    iraf.longslit(_doprint=0)
    return
Example #15
def combinelamp(lst):
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    iraf.imcombine(input='%ftb%ftb%@' + lst,
                   output='Lamp',
                   combine='sum',
                   reject='none')
    print('<<<<<combine the lamp & generate the Lamp.fits>>>>>')
    iraf.flpr()
Example #16
def darkcom(input_dark):
    import glob
    import os, sys
    import numpy as np    
    from pyraf import iraf
    from astropy.io import fits
    from astropy.io import ascii
    curdir = os.getcwd()
    curdate = curdir.split('/')[-1]
    #dark = ", ".join(input_dark)
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    iraf.ccdred.setinst(instrume='camera', directo='/iraf/iraf/noao/imred/ccdred/ccddb/', query='q', review='no')
    #iraf.chdir('./dark')
    #dark = glob.glob('cal*dk*.fit')
    #dark = ascii.read('dark.list', guess=True, data_start=0)
    '''
    allexptime = []
    for i in range(len(dark)) :
        hdr = fits.getheader(dark[i])
        allexptime.append(hdr['exptime'])
    expset = set(allexptime)
    exptime = list(sorted(expset))
    i=0
    for i in range(len(exptime)) :
        print('Find images with exptime of '+str(exptime[i]))
        imlist = []
        for j in range(len(dark)) :
            hdr = fits.getheader(dark[j])
            if hdr['exptime'] == exptime[i] :
                imlist.append(dark[j])
            else :
                pass
        print(imlist)
        input_name = 'dark'+str(int(exptime[i]))+'.list'
        output_name = curdate+'_dark'+str(int(exptime[i]))+'.fits'
        #input_name = output_name[:-5]+'.list'
        f=open(input_name,'w+')
        for k in range(len(imlist)) : 
            f.write(imlist[k]+'\n')
        f.close()
    '''
    #output_name = curdate+'_dark'+str(int(exptime[i]))+'.fits'
    output_name = curdate+'_'+input_dark[:-5]+'.fits'
    print('Darkcombine is running...')
    iraf.imstat(images='z@'+input_dark)
    iraf.darkcombine(input='z@'+input_dark, output=output_name, combine='median', reject='minmax', process='no', scale='none', ccdtype='' )
    #os.system('/usr/bin/cp '+output_name+' ../')
    os.system('/usr/bin/cp '+output_name+' /data1/KHAO/MDFTS/red/masterdark/')
    #os.system('/usr/bin/rm d*.list')
    #iraf.chdir('../')
    iraf.dir('.')
    print('Output master '+output_name+' is created.') 
Example #17
def combinebias(filename):
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    iraf.zerocombine(input = 'o//@' + filename
	, output = 'Zero', combine = 'average', reject = 'minmax'
	, ccdtype = '', process = False, delete = False
	, clobber = False, scale = 'none', statsec = ''
	, nlow = 0, nhigh = 1, nkeep = 1, mclip = True
	, lsigma = 3.0, hsigma = 3.0, rdnoise = 'rdnoise'
	, gain = 'gain', snoise = 0.0, pclip = -0.5, blank = 0.0)
Example #18
def combine_flat(lstfile):
    if os.path.isfile('Halogen.fits'):
        print 'remove Halogen.fits'
        os.remove('Halogen.fits')
    if os.path.isfile('Resp.fits'):
        print 'remove Resp.fits'
        os.remove('Resp.fits')
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    iraf.flatcombine(input='tbo//@' + lstfile,
                     output='Halogen',
                     combine='average',
                     reject='crreject',
                     ccdtype='',
                     process=False,
                     subsets=False,
                     delete=False,
                     clobber=False,
                     scale='mode',
                     statsec='',
                     nlow=1,
                     nhigh=1,
                     nkeep=1,
                     mclip=True,
                     lsigma=3.0,
                     hsigma=3.0,
                     rdnoise='rdnoise',
                     gain='gain',
                     snoise=0.0,
                     pclip=-0.5,
                     blank=1.0)
    iraf.twodspec()
    iraf.longslit(dispaxis=2,
                  nsum=1,
                  observatory='Lijiang',
                  extinction=func.config_path + os.sep + 'LJextinct.dat',
                  caldir=func.std_path + os.sep,
                  interp='poly5')
    iraf.response(calibration='Halogen',
                  normalization='Halogen',
                  response='Resp',
                  interactive=True,
                  threshold='INDEF',
                  sample='*',
                  naverage=1,
                  function='spline3',
                  order=25,
                  low_reject=10.0,
                  high_reject=10.0,
                  niterate=1,
                  grow=0.0,
                  graphics='stdgraph',
                  cursor='')
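
A usage note for the function above: it assumes a local func module providing config_path (the directory holding LJextinct.dat) and std_path (the standard-star calibration directory), and it expects the trimmed, bias-corrected flats tbo*.fits to exist already. A hypothetical call:

# combines tbo*.fits into Halogen.fits, then fits the response interactively
# and writes Resp.fits (the list file name is hypothetical)
combine_flat('halogen.lst')
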
Example #19
def flatcom(input_flat='zFLAT*.fit'):
    import glob
    import os, sys
    import itertools
    import numpy as np
    from pyraf import iraf
    from astropy.io import fits
    from astropy.io import ascii
    # date tag used in the output flat names below, taken from the name of the
    # current working directory (as in the darkcom example above)
    curdir = os.getcwd()
    curdate = curdir.split('/')[-1]
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    iraf.ccdred.setinst(instrume='camera', directo='/iraf/iraf/noao/imred/ccdred/ccddb/', query='q', review='no')
    # Filter classification
    calflat = glob.glob(input_flat)
    allfilter = []
    i=0
    for i in range(len(calflat)) :
        hdr = fits.getheader(calflat[i])
        allfilter.append(hdr['filter'])
    filterset = set(allfilter)
    infilter = list(sorted(filterset))
    i=0
    for i in range(len(infilter)) :
        print('Find images with filter of '+str(infilter[i]))
        imlist = []
        for j in range(len(calflat)) :
            hdr = fits.getheader(calflat[j])
            if hdr['filter'] == infilter[i] :
                imlist.append(calflat[j])
            else :
                pass
        print(imlist)
        imlist.sort()
        input_name = str(infilter[i])+'flat.list'
        k=0
        f=open(input_name,'w+')
        for k in range(len(imlist)) : 
            f.write(imlist[k]+'\n')
        f.close()
        output_name = input_name[:-5]+'.fits'
        #iraf.flatcombine(input='@'+input_name, output=output_name, combine='average', reject='crreject', process='no', scale='mode', ccdtype='', lsigma='3.', hsigma='3.' )
        iraf.flatcombine(input='@'+input_name, output=output_name, combine='median', reject='minmax', process='no', scale='mode', ccdtype='')
        print(output_name+' is created. Normalizing...')
        data, newhdr = fits.getdata(output_name, header=True)
        x = np.mean(data)
        nimage = data/x
        newflat_name = curdate+'_n'+str(infilter[i])+'flat.fits'
        fits.writeto(newflat_name, nimage, header=newhdr, overwrite=True)
        #os.system('/usr/bin/cp '+newflat_name+' ../')
        os.system('/usr/bin/cp '+newflat_name+' /data1/KHAO/MDFTS/red/masterflat_'+infilter[i]+'/')
    print('Normalised master flats are created.')
    iraf.imstat(images='*n?flat.fits')
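
A usage sketch for flatcom; the pattern below is the function default and matches bias-subtracted flats. Frames are grouped by their FILTER header keyword, so one combined flat and one normalised flat are written per filter:

# writes <filter>flat.fits and <date>_n<filter>flat.fits for each filter found
flatcom('zFLAT*.fit')
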
Example #20
def main():
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    print '=' * 20, 'Overscan', '=' * 20
    correct_overscan('spec.lst')
    print '=' * 20, 'combine bias', '=' * 20
    combine_bias('bias.lst')
    print '=' * 20, 'correct bias', '=' * 20
    correct_bias('spec_no_bias.lst', 'bias_spec.fits')
    name = os.popen('ls object*.lst').readlines()
    name = [i.split()[0] for i in name]
    for i in name:
        ntrim_flat(i)
Example #21
def main():
    """
    correct zero and flat for photometric fits
    and also trim the fits.
    finaly get a reduced fits ftb*.fits.(f:flat, t:trim, b:bias)
    """
    print('=' * 50)
    print('<<<run  reduce_img.py  # trim , correct zero & flat>>>>>')

    # delete the results of previous runs
    delete_file(['Zero.fits'])
    pre_lst = glob.glob('b*.fits')
    delete_file(pre_lst)
    pre_lst = glob.glob('tb*.fits')
    delete_file(pre_lst)
    delete_file(['flat.fits'])
    pre_lst = glob.glob('ftb*.fits')
    delete_file(pre_lst)
    print '<<<<<clear all the previous file>>>>>'

    iraf.imred()
    iraf.ccdred(instrument='ccddb$kpno/specphot.dat')

    alllst = [i.strip() for i in file('all.lst')]
    for name in alllst:
        del_wrong_keyword(name.strip())
    print '<<<<<correct the wrong keyword>>>>>'

    print '<<<<<\t combine bias \t>>>>>'
    combine_bias('zero.lst')

    print '<<<<<\t correct Zero \t>>>>>'
    cor_zero('cor_zero.lst')

    print '<<<<<\t trim_section \t>>>>>'
    trimlst = np.loadtxt('trimlst')
    wid = int(trimlst[0])
    hig = int(trimlst[1])
    trim_section = get_trimsec(wid, hig)
    trim('cor_zero.lst', trim_section)

    print '<<<<<\t combine flat \t>>>>>'
    combine_flat('flat.lst')

    print '<<<<<\t correct flat \t>>>>>'
    cor_flat('cor_flat.lst')

    print('<<<<< run reduce_img.py successfully >>>>>  ')
    print('=' * 50)
Example #22
def flatresponse(input='Flat', output='nFlat'):
    ''' normalize Flat to correct illumination patterns'''

    iraf.imred()
    iraf.ccdred()
    iraf.specred()

    iraf.ccdred.combine.unlearn()
    iraf.ccdred.ccdproc.unlearn()
    iraf.specred.response.unlearn()
    iraf.specred.response.interactive = True
    iraf.specred.response.function = 'chebyshev'
    iraf.specred.response.order = 1
    iraf.specred.response(calibration=input, normalization=input,
            response=output)
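
A usage sketch for flatresponse; because interactive is set to True, specred.response opens a graphics window for the fit, so this needs a session with IRAF graphics available (the file names below are the function defaults):

# normalise the combined flat along the dispersion direction, producing nFlat.fits
flatresponse(input='Flat', output='nFlat')
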
Example #23
def main():

    config = {'combine': 'average', 'reject': 'minmax', 'nlow': 3, 'nhigh': 3}

    path = '/btfidr/data/20130902/007/cam2'
    kernel = 'biasEMOFF_A*'
    list_of_files = glob.glob(os.path.join(path, kernel))
    random.shuffle(list_of_files)
    number_of_files = len(list_of_files)

    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)

    n, a, s = [], [], []
    for l in range(2, number_of_files):

        temp_list = 'list'
        buffer = open(temp_list, mode='w')
        for i in range(l):
            buffer.write(list_of_files[i] + "\n")
        buffer.close()

        iraf.imcombine(input='@' + temp_list,
                       output='temp_output.fits',
                       **config)

        data = pyfits.getdata('temp_output.fits')[700:899, 700:899]

        n.append(l)
        a.append(np.average(data))
        s.append(np.std(data))

        os.remove('temp_output.fits')
        os.remove(temp_list)

    output = 'output.dat'
    buffer = open(output, mode='w')

    for key in config.keys():
        buffer.write('# {}: {}\n'.format(key, config[key]))

    buffer.write('# N\tAVG\tSTD\n')
    for i in range(len(n)):
        buffer.write(u'{0:3d}\t{1:5.1f}\t{2:5.1f}\n'.format(n[i], a[i], s[i]))

    buffer.close()
    return 0
Example #24
def ImportPackages():
	iraf.noao(_doprint=0)
	iraf.rv(_doprint=0)
	iraf.imred(_doprint=0)
	iraf.kpnoslit(_doprint=0)	
	iraf.ccdred(_doprint=0)
	iraf.astutil(_doprint=0)
	
	iraf.keywpars.setParam('ra','CAT-RA') 
	iraf.keywpars.setParam('dec','CAT-DEC')
	iraf.keywpars.setParam('ut','UT')
	iraf.keywpars.setParam('utmiddl','UT-M_E')
	iraf.keywpars.setParam('exptime','EXPTIME')
	iraf.keywpars.setParam('epoch','CAT-EPOC')
	iraf.keywpars.setParam('date_ob','DATE-OBS')
	iraf.keywpars.setParam('hjd','HJD')
	iraf.keywpars.setParam('mjd_obs','MJD-OBS')
	iraf.keywpars.setParam('vobs','VOBS')
	iraf.keywpars.setParam('vrel','VREL')
	iraf.keywpars.setParam('vhelio','VHELIO')
	iraf.keywpars.setParam('vlsr','VLSR')
	iraf.keywpars.setParam('vsun','VSUN')
	iraf.keywpars.setParam('mode','ql')
	iraf.fxcor.setParam('continu','both')
	iraf.fxcor.setParam('filter','none')
	iraf.fxcor.setParam('rebin','smallest')
	iraf.fxcor.setParam('pixcorr','no')
	iraf.fxcor.setParam('apodize','0.2')
	iraf.fxcor.setParam('function','gaussian')
	iraf.fxcor.setParam('width','INDEF')
	iraf.fxcor.setParam('height','0.')
	iraf.fxcor.setParam('peak','no')
	iraf.fxcor.setParam('minwidt','3.')
	iraf.fxcor.setParam('maxwidt','21.')
	iraf.fxcor.setParam('weights','1.')
	iraf.fxcor.setParam('backgro','0.')
	iraf.fxcor.setParam('window','INDEF')
	iraf.fxcor.setParam('wincent','INDEF')
	iraf.fxcor.setParam('verbose','long')
	iraf.fxcor.setParam('imupdat','no')
	iraf.fxcor.setParam('graphic','stdgraph')
	iraf.fxcor.setParam('interac','yes')
	iraf.fxcor.setParam('autowri','yes')
	iraf.fxcor.setParam('autodra','yes')
	iraf.fxcor.setParam('ccftype','image')
	iraf.fxcor.setParam('observa','lapalma')
	iraf.fxcor.setParam('mode','ql')
	return 0
Example #25
def set_instrument():

    """
    Needed so that when we do flatcombine iraf doesn't try to do flat correction
    etc. Might need to run before calling any other function. Not sure if specphot
    is appropriate but seems to work.
    """

    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)

    iraf.setinstrument.setParam('instrument','specphot')
    iraf.setinstrument()

    return None
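
A usage sketch: set_instrument() is meant to run once per session before any ccdred task, so that combine/proc tasks pick up the 'specphot' instrument translation. The follow-up call is hypothetical and only illustrates the ordering:

from pyraf import iraf   # as assumed by the examples above

set_instrument()
# any ccdred task can run afterwards, e.g. (hypothetical pattern):
iraf.flatcombine(input='flat*.fits')
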
Example #26
def flatresponse(input='Flat', output='nFlat'):
    ''' normalize Flat to correct illumination patterns'''

    iraf.imred()
    iraf.ccdred()
    iraf.specred()

    iraf.ccdred.combine.unlearn()
    iraf.ccdred.ccdproc.unlearn()
    iraf.specred.response.unlearn()
    iraf.specred.response.interactive = True
    iraf.specred.response.function = 'chebyshev'
    iraf.specred.response.order = 1
    iraf.specred.response(calibration=input,
                          normalization=input,
                          response=output)
Example #27
    def the_zerocombine(self,
                        in_file_list,
                        out_file,
                        method="median",
                        rejection="minmax",
                        ccdtype="",
                        gain="",
                        rdnoise=""):

        self.etc.log(
            "Zerocombine started for {} files using combine({}) and rejection({})"
            .format(len(in_file_list), method, rejection))

        try:
            if self.fop.is_file(out_file):
                self.fop.rm(out_file)

            files = []
            for file in in_file_list:
                if self.fit.is_fit(file):
                    files.append(file)

            if not len(files) == 0:
                biases = ",".join(files)
                # Load packages
                # Unlearn settings
                iraf.imred.unlearn()
                iraf.ccdred.unlearn()
                iraf.ccdred.ccdproc.unlearn()
                iraf.ccdred.combine.unlearn()
                iraf.ccdred.zerocombine.unlearn()

                ccdred.instrument = "ccddb$kpno/camera.dat"
                iraf.imred()
                iraf.ccdred()

                iraf.zerocombine(input=biases,
                                 output=out_file,
                                 combine=method,
                                 reject=rejection,
                                 ccdtype=ccdtype,
                                 Stdout="/dev/null")
            else:
                self.etc.log("No files to combine")

        except Exception as e:
            self.etc.log(e)
Example #28
def coroverscan(lstfn):
    """
    call iraf command ccdproc, overscan correct.
    lstfn : lst file name
    type : string
    output file : oYF*.fits
    """
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    iraf.ccdproc(images='@' + lstfn + '//[1]',
                 output='%o%o%@' + lstfn,
                 ccdtype='',
                 max_cache=0,
                 noproc=False,
                 fixpix=False,
                 overscan=True,
                 trim=False,
                 zerocor=False,
                 darkcor=False,
                 flatcor=False,
                 illumcor=False,
                 fringecor=False,
                 readcor=False,
                 scancor=False,
                 readaxis='line',
                 fixfile='',
                 biassec='[5:45,1:4612]',
                 trimsec='',
                 zero='',
                 dark='',
                 flat='',
                 illum='',
                 fringe='',
                 minreplace=1.0,
                 scantype='shortscan',
                 nscan=1,
                 interactive=False,
                 function='chebyshev',
                 order=1,
                 sample='*',
                 naverage=1,
                 niterate=1,
                 low_reject=3.0,
                 high_reject=3.0,
                 grow=1.0)
    iraf.flpr()
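
A usage sketch for coroverscan; the list file name is hypothetical. Each list entry is assumed to be a multi-extension FITS file (the '[1]' suffix in the images template selects extension 1), and the corrected copies come out with an 'o' prefix (oYF*.fits), as the docstring notes:

# hypothetical list of raw YF*.fits frames; writes overscan-corrected oYF*.fits copies
coroverscan('spec.lst')
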
Example #29
def corhalogen(lstfile):
    namelst = glob.glob('ftbo*.fits')
    for name in namelst:
        print 'remove ', name
        os.remove(name)
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    iraf.ccdproc(images='tbo@' + lstfile,
                 output='%ftbo%ftbo%@' + lstfile,
                 ccdtype='',
                 max_cache=0,
                 noproc=False,
                 fixpix=False,
                 overscan=False,
                 trim=False,
                 zerocor=False,
                 darkcor=False,
                 flatcor=True,
                 illumcor=False,
                 fringecor=False,
                 readcor=False,
                 scancor=False,
                 readaxis='line',
                 fixfile='',
                 biassec='',
                 trimsec='',
                 zero='Zero',
                 dark='',
                 flat='Resp',
                 illum='',
                 fringe='',
                 minreplace=1.0,
                 scantype='shortscan',
                 nscan=1,
                 interactive=False,
                 function='chebyshev',
                 order=1,
                 sample='*',
                 naverage=1,
                 niterate=1,
                 low_reject=3.0,
                 high_reject=3.0,
                 grow=1.0)
    iraf.flpr()
Example #30
def corhalogen(lstfile):
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    iraf.ccdproc(images = 'tbo@' + lstfile
        , output = '%ftbo%ftbo%@' + lstfile
        , ccdtype = '', max_cache = 0, noproc = False
        , fixpix = False, overscan = False, trim = False
        , zerocor = False, darkcor = False, flatcor = True
        , illumcor = False, fringecor = False, readcor = False
        , scancor = False, readaxis = 'line', fixfile = ''
        , biassec = '', trimsec = ''
        , zero = 'Zero', dark = '', flat = 'Resp', illum = '', fringe = ''
        , minreplace = 1.0, scantype = 'shortscan', nscan = 1
        , interactive = False, function = 'chebyshev', order = 1
        , sample = '*', naverage = 1, niterate = 1
        , low_reject = 3.0, high_reject = 3.0, grow = 1.0)
    iraf.flpr()
Example #31
def coroverscan(filename):
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    iraf.ccdproc(images = '@' + filename + '//[1]'
    	, output = '%o%o%@' + filename
	, ccdtype = '', max_cache = 0, noproc = False
	, fixpix = False, overscan = True, trim = False
	, zerocor = False, darkcor = False, flatcor = False
	, illumcor = False, fringecor = False, readcor = False
	, scancor = False, readaxis = 'line', fixfile = ''
	, biassec = '[5:45,1:4612]', trimsec = '', zero = ''
	, dark = '', flat = '', illum = '', fringe = ''
	, minreplace = 1.0, scantype = 'shortscan', nscan = 1
	, interactive = False, function = 'chebyshev', order = 1
	, sample = '*', naverage = 1, niterate = 1
	, low_reject = 3.0, high_reject = 3.0, grow = 1.0)
    iraf.flpr()
Example #32
def runapall(imagesre, gain='gain', rdnoise='rdnoise'):
    '''Extract aperture spectra for science images ...'''

    imageslist = glob.glob(imagesre)
    imagesin = ', '.join(imageslist)

    # load packages
    iraf.imred()
    iraf.ccdred()
    iraf.specred()
    # unlearn previous settings
    iraf.ccdred.combine.unlearn()
    iraf.ccdred.ccdproc.unlearn()
    iraf.specred.apall.unlearn()
    # setup and run task
    iraf.specred.apall.format = 'onedspec'
    iraf.specred.apall.readnoise = rdnoise
    iraf.specred.apall.gain = gain
    iraf.specred.apall(input=imagesin)
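
A usage sketch for runapall; the pattern is hypothetical. apall is left at its interactive defaults, so a graphics terminal is expected, and the extracted spectra are written in onedspec format with the read noise and gain taken from the named header keywords:

# extract aperture spectra from every reduced science frame matching the pattern
runapall('ftb*.fits')
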
Example #33
def runapall(imagesre, gain='gain', rdnoise='rdnoise'):
    '''Extract aperture spectra for science images ...'''

    imageslist = glob.glob(imagesre)
    imagesin = ', '.join(imageslist)

    # load packages
    iraf.imred()
    iraf.ccdred()
    iraf.specred()
    # unlearn previous settings
    iraf.ccdred.combine.unlearn()
    iraf.ccdred.ccdproc.unlearn()
    iraf.specred.apall.unlearn()
    # setup and run task
    iraf.specred.apall.format = 'onedspec'
    iraf.specred.apall.readnoise = rdnoise
    iraf.specred.apall.gain = gain
    iraf.specred.apall(input=imagesin)
Example #34
def masterflat(flatre,
               output='Flat',
               combine='median',
               reject='sigclip',
               scale='mode',
               rdnoise='rdnoise',
               gain='gain'):
    '''combine flat images with the task ccdred.flatcombine

    Input:
    -------
     str: flatre - glob pattern matching the flat field files in the current directory

    Output:
    -------
     file: Flat.fits - combined flat field images
    '''

    flatlist = glob.glob(flatre)
    flatstring = ', '.join(flatlist)

    # load packages
    iraf.imred()
    iraf.ccdred()
    # unlearn settings
    iraf.imred.unlearn()
    iraf.ccdred.unlearn()
    iraf.ccdred.ccdproc.unlearn()
    iraf.ccdred.combine.unlearn()
    iraf.ccdred.flatcombine.unlearn()
    iraf.ccdred.setinstrument.unlearn()
    # setup task
    iraf.ccdred.flatcombine.output = output
    iraf.ccdred.flatcombine.combine = combine
    iraf.ccdred.flatcombine.reject = reject
    iraf.ccdred.flatcombine.ccdtype = ''
    iraf.ccdred.flatcombine.process = 'no'
    iraf.ccdred.flatcombine.subsets = 'yes'
    iraf.ccdred.flatcombine.scale = scale
    iraf.ccdred.flatcombine.rdnoise = rdnoise
    iraf.ccdred.flatcombine.gain = gain
    iraf.ccdred.flatcombine(input=flatstring)
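
A usage sketch for masterflat; the pattern is hypothetical. Because subsets is enabled, flatcombine splits the input by subset (e.g. filter) and writes one combined flat per subset, appending the subset identifier to the output root name:

# combine all flat frames matching the pattern; the output root defaults to 'Flat'
masterflat('flat*.fits')
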
Example #35
def masterbias(biasre,
               output='Zero',
               combine='median',
               reject='minmax',
               ccdtype='',
               rdnoise='rdnoise',
               gain='gain'):
    '''run the task ccdred.zerocombine with chosen parameters

    Input:
    -------
     str biasre: glob pattern (shell-style wildcard) matching the zero level images

    Output:
    -------
     file Zero.fits: combined zerolevel images
    '''

    biaslist = glob.glob(biasre)
    biasstring = ', '.join(biaslist)

    # load packages
    iraf.imred()
    iraf.ccdred()
    # unlearn settings
    iraf.imred.unlearn()
    iraf.ccdred.unlearn()
    iraf.ccdred.ccdproc.unlearn()
    iraf.ccdred.combine.unlearn()
    iraf.ccdred.zerocombine.unlearn()
    iraf.ccdred.setinstrument.unlearn()
    # setup task
    iraf.ccdred.zerocombine.output = output
    iraf.ccdred.zerocombine.combine = combine
    iraf.ccdred.zerocombine.reject = reject
    iraf.ccdred.zerocombine.ccdtype = ccdtype
    iraf.ccdred.zerocombine.rdnoise = rdnoise
    iraf.ccdred.zerocombine.gain = gain
    # run task
    iraf.ccdred.zerocombine(input=biasstring)
Example #36
def flat_correction(targetdir,flatdir):

    """
    Flat field correction
    """

    print 'Target directory is ' + targetdir
    print 'Flat directory is ' + flatdir
    print 'Applying flat field correction...'

    innames, outnames = [], []

    for n in os.listdir(targetdir):
        if (n.endswith('.fit')) & (n.startswith('cr')):
            outnames.append( os.path.join(targetdir,'f' + n) )
            innames.append( os.path.join( targetdir, n + '[1]') )
            if os.path.exists( os.path.join( targetdir, 'f' + n) ):
                print 'Removing file ' + os.path.join( targetdir, 'f' + n)
                os.remove( os.path.join( targetdir, 'f' + n) )

    with open( os.path.join(targetdir,'input.list'), 'w') as f:
        for name in innames:
            f.write( name + '\n' )

    with open( os.path.join(targetdir,'output.list'), 'w') as f:
        for name in outnames:
            f.write( name + '\n' )

    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)

    iraf.ccdproc.setParam('images', '@' + os.path.join(targetdir, 'input.list') )
    iraf.ccdproc.setParam('flatcor','yes')
    iraf.ccdproc.setParam('flat', os.path.join(flatdir,'nFlat.fits') )
    iraf.ccdproc.setParam('output', '@' + os.path.join(targetdir, 'output.list'))

    iraf.ccdproc()

    return None
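
A usage sketch for flat_correction; the directories are hypothetical. It expects cosmic-ray-cleaned frames named cr*.fit in the target directory and a normalised flat nFlat.fits in the flat directory, and it writes fcr*.fit files next to the inputs:

flat_correction('/data/obs/20180101/target', '/data/obs/20180101/flat')
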
Example #37
def flatcom(inim_list,
            outim_list,
            combine='median',
            reject='sigclip',
            scale='mode'):
    import os, sys
    from pyraf import iraf
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    iraf.ccdred.setinst(instrume='camera',
                        directo='/iraf/iraf/noao/imred/ccdred/ccddb/',
                        query='q',
                        review='no')
    iraf.flatcombine(input=inim_list,
                     output=outim_list,
                     combine=combine,
                     reject=reject,
                     process='no',
                     scale=scale,
                     ccdtype='')
    print('Output masterflat is created.')
Example #38
def biascom(input_bias='bias.list'):
    """
    1. Description 
    : This function makes a master bias image of SAO 1-m using Pyraf. Put bias images to 201XXXXX/bias/ directory. Run this code on 201XXXXX directory, then pyraf chdir task enter bias directory and makes process. Output image is zero.fits and it will be copied on upper directory. Due to iraf.chdir task, you should reset python when this code is finished in order to make confusion of current directory between iraf and python! 
    
    2. Usage 
    : Start on 2018XXXX directory. Make bias directory which contains each bias frame. Naming of each bias images should be cal*bias.fit. Then just use SAO_biascom()
    
    >>> SAO_biascom()

    3. History
    2018.03    Created by G.Lim.
    2018.12.17 Edited by G.Lim. Define SAO_biascom function. 
    2019.02.07 Assign archive of masterbias in each date by G. Lim
    """
    import glob
    import os, sys
    import numpy as np
    from pyraf import iraf
    from astropy.io import fits
    curdir = os.getcwd()
    curdate = curdir.split('/')[-1]
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    iraf.ccdred.setinst(instrume='camera', directo='/iraf/iraf/noao/imred/ccdred/ccddb/', query='q', review='no')
    #input_name = 'bias.list'
    output_name = curdate+'_zero.fits'
    #calibrations = input_bias
    #f    = open(input_name,'w+')
    #for i in range(len(calibrations)):    
    #    f.write(calibrations[i]+'\n')
    #f.close()
    print('Zerocombine is running...')
    iraf.imstat(images='@'+input_bias)
    iraf.zerocombine(input='@'+input_bias, output=output_name, combine='median', reject='minmax', process='no', scale='none', ccdtype='' )
    print('Output master '+output_name+' is created.')
    os.system('/usr/bin/cp '+output_name+' /data1/KHAO/MDFTS/red/masterbias/')
    iraf.dir('.')
Example #39
def combine_flat(lstfile):
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    iraf.flatcombine(input = 'tbo//@' + lstfile
    	, output = 'Halogen', combine = 'average', reject = 'crreject'
    	, ccdtype = '', process = False, subsets = False
    	, delete = False, clobber = False, scale = 'mode'
    	, statsec = '', nlow = 1, nhigh = 1, nkeep = 1
    	, mclip = True, lsigma = 3.0, hsigma = 3.0
    	, rdnoise = 'rdnoise', gain = 'gain', snoise = 0.0
    	, pclip = -0.5, blank = 1.0)
    script_path = os.path.split(os.path.realpath(__file__))[0]
    iraf.twodspec()
    iraf.longslit(dispaxis = 2, nsum = 1, observatory = 'observatory'
        , extinction = script_path + os.sep + 'LJextinct.dat'
        , caldir = script_path + os.sep + 'standarddir' + os.sep, interp = 'poly5')
    iraf.response(calibration = 'Halogen'
    	, normalization = 'Halogen', response = 'Resp'
    	, interactive = True, threshold = 'INDEF', sample = '*'
    	, naverage = 1, function = 'spline3', order = 25
    	, low_reject = 10.0, high_reject = 10.0, niterate = 1
    	, grow = 0.0, graphics = 'stdgraph', cursor = '')
Example #40
def masterflat(flatre, output='Flat', combine='median', reject='sigclip',
               scale='mode', rdnoise='rdnoise', gain='gain'):
    '''combine flat images with the task ccdred.flatcombine

    Input:
    -------
     str: flatre - glob pattern matching the flat field files in the current directory

    Output:
    -------
     file: Flat.fits - combined flat field images
    '''

    flatlist = glob.glob(flatre)
    flatstring = ', '.join(flatlist)

    # load packages
    iraf.imred()
    iraf.ccdred()
    # unlearn settings
    iraf.imred.unlearn()
    iraf.ccdred.unlearn()
    iraf.ccdred.ccdproc.unlearn()
    iraf.ccdred.combine.unlearn()
    iraf.ccdred.flatcombine.unlearn()
    iraf.ccdred.setinstrument.unlearn()
    # setup task
    iraf.ccdred.flatcombine.output = output
    iraf.ccdred.flatcombine.combine = combine
    iraf.ccdred.flatcombine.reject = reject
    iraf.ccdred.flatcombine.ccdtype = ''
    iraf.ccdred.flatcombine.process = 'no'
    iraf.ccdred.flatcombine.subsets = 'yes'
    iraf.ccdred.flatcombine.scale = scale
    iraf.ccdred.flatcombine.rdnoise = rdnoise
    iraf.ccdred.flatcombine.gain = gain
    iraf.ccdred.flatcombine(input=flatstring)
Example #41
def floydsautoredu(files,_interactive,_dobias,_doflat,_listflat,_listbias,_listarc,_cosmic,_ext_trace,_dispersionline,liststandard,listatmo,_automaticex,_classify=False,_verbose=False,smooth=1,fringing=1):
    import floyds
    import string,re,os,glob,sys,pickle
    from numpy import array, arange, mean,pi,arccos,sin,cos,argmin
    from astropy.io import fits
    from pyraf import iraf
    import datetime
    os.environ["PYRAF_BETA_STATUS"] = "1"
    iraf.set(direc=floyds.__path__[0]+'/')
    _extinctdir='direc$standard/extinction/'
    _tel=floyds.util.readkey3(floyds.util.readhdr(re.sub('\n','',files[0])),'TELID')
    if _tel=='fts':
        _extinction='ssoextinct.dat'
        _observatory='sso'
    elif _tel=='ftn':
        _extinction='maua.dat' 
        _observatory='cfht'   
    else: sys.exit('ERROR: observatory not recognised')
    dv=floyds.util.dvex()
    scal=pi/180.
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    iraf.twodspec(_doprint=0)
    iraf.longslit(_doprint=0)
    iraf.specred(_doprint=0)
    toforget = ['ccdred.flatcombine','ccdred.zerocombine','ccdproc','specred.apall','longslit.identify','longslit.reidentify',\
                    'specred.standard','longslit.fitcoords','specred.transform','specred.response']
    for t in toforget: iraf.unlearn(t)
    iraf.longslit.dispaxi=2
    iraf.longslit.mode='h'
    iraf.identify.fwidth=7 
    iraf.identify.order=2 
    iraf.specred.dispaxi=2
    iraf.specred.mode='h'
    iraf.ccdproc.darkcor='no'
    iraf.ccdproc.fixpix='no'
    iraf.ccdproc.trim='no'
    iraf.ccdproc.flatcor='no'
    iraf.ccdproc.overscan='no'
    iraf.ccdproc.zerocor='no'
    iraf.ccdproc.biassec=''
    iraf.ccdproc.ccdtype=''
    iraf.ccdred.instrument = "/dev/null"
    if _verbose: 
        iraf.ccdred.verbose='yes'
        iraf.specred.verbose='yes'
    else: 
        iraf.specred.verbose='no'
        iraf.ccdred.verbose='no'
    now=datetime.datetime.now()
    datenow=now.strftime('20%y%m%d%H%M')
    MJDtoday=55928+(datetime.date.today()-datetime.date(2012, 1, 1)).days
    outputlist=[]
    hdra=floyds.util.readhdr(re.sub('\n','',files[0]))
    _gain=floyds.util.readkey3(hdra,'gain')
    _rdnoise=floyds.util.readkey3(hdra,'ron')
    std,rastd,decstd,magstd=floyds.util.readstandard('standard_floyds_mab.txt')
    _naxis2=hdra.get('NAXIS2')
    _naxis1=hdra.get('NAXIS1')
    if not _naxis1: _naxis1=2079
    if not _naxis2:
        if not hdra.get('HDRVER'):   _naxis2=511
        else:                        _naxis2=512
    _overscan='[2049:'+str(_naxis1)+',1:'+str(_naxis2)+']'
    _biassecblu='[380:2048,325:'+str(_naxis2)+']'    
    _biassecred='[1:1800,1:350]'    
    lista={}
    objectlist={}
    biaslist={}
    flatlist={}
    flatlistd={}
    arclist={}
    max_length=14
    for img in files:
        hdr0=floyds.util.readhdr(img)
        if  floyds.util.readkey3(hdr0,'naxis2')>=500:
            if 'blu' not in lista: lista['blu']=[]
            if 'red' not in lista: lista['red']=[]
            _object0=floyds.util.readkey3(hdr0,'object')
            _object0 = re.sub(':', '', _object0) # colon
            _object0 = re.sub('/', '', _object0) # slash
            _object0 = re.sub('\s', '', _object0) # any whitespace
            _object0 = re.sub('\(', '', _object0) # open parenthesis
            _object0 = re.sub('\[', '', _object0) # open square bracket
            _object0 = re.sub('\)', '', _object0) # close parenthesis
            _object0 = re.sub('\]', '', _object0) # close square bracket
            _object0 = _object0.replace(r'\t', '') # Any tab characters
            _object0 = _object0.replace('*', '') # Any asterisks

            if len(_object0) > max_length:
                _object0 = _object0[:max_length]
            _date0=floyds.util.readkey3(hdr0,'date-night')
            _tel=floyds.util.readkey3(hdr0,'TELID')
            _type=floyds.util.readkey3(hdr0,'OBSTYPE')
            if not _type:    _type=floyds.util.readkey3(hdr0,'imagetyp')
            _slit=floyds.util.readkey3(hdr0,'slit')
            if _type:
                _type = _type.lower()
                if _type in ['sky','spectrum','expose']:
                    nameoutb=str(_object0)+'_'+_tel+'_'+str(_date0)+'_blue_'+str(_slit)+'_'+str(MJDtoday)
                    nameoutr=str(_object0)+'_'+_tel+'_'+str(_date0)+'_red_'+str(_slit)+'_'+str(MJDtoday)
                elif _type in ['lamp','arc','l']:
                    nameoutb='arc_'+str(_object0)+'_'+_tel+'_'+str(_date0)+'_blue_'+str(_slit)+'_'+str(MJDtoday)
                    nameoutr='arc_'+str(_object0)+'_'+_tel+'_'+str(_date0)+'_red_'+str(_slit)+'_'+str(MJDtoday)
                elif _type in ['flat','f','lampflat','lamp-flat']:
                    nameoutb='flat_'+str(_object0)+'_'+_tel+'_'+str(_date0)+'_blue_'+str(_slit)+'_'+str(MJDtoday)
                    nameoutr='flat_'+str(_object0)+'_'+_tel+'_'+str(_date0)+'_red_'+str(_slit)+'_'+str(MJDtoday)
                else:
                    nameoutb=str(_type.lower())+'_'+str(_object0)+'_'+_tel+'_'+str(_date0)+'_blue_'+str(_slit)+'_'+str(MJDtoday)
                    nameoutr=str(_type.lower())+'_'+str(_object0)+'_'+_tel+'_'+str(_date0)+'_red_'+str(_slit)+'_'+str(MJDtoday)

                bimg=floyds.util.name_duplicate(img,nameoutb,'')
                rimg=floyds.util.name_duplicate(img,nameoutr,'')
####
                floyds.util.delete(bimg)
                floyds.util.delete(rimg)
                iraf.imcopy(img,bimg,verbose='no')
                iraf.imcopy(img,rimg,verbose='no')

                aaa=iraf.hedit(bimg,'CCDSEC',delete='yes',update='yes',verify='no',Stdout=1)
                aaa=iraf.hedit(bimg,'TRIMSEC',delete='yes',update='yes',verify='no',Stdout=1)
                aaa=iraf.hedit(rimg,'CCDSEC',delete='yes',update='yes',verify='no',Stdout=1)
                aaa=iraf.hedit(rimg,'TRIMSEC',delete='yes',update='yes',verify='no',Stdout=1)

                iraf.ccdproc(bimg,output='', overscan="yes", trim="yes", zerocor='no', flatcor='no', zero='', ccdtype='',\
                                 fixpix='no', trimsec=_biassecblu, biassec=_overscan, readaxi='line', Stdout=1)
                iraf.ccdproc(rimg,output='', overscan="yes", trim="yes", zerocor='no', flatcor='no', zero='', ccdtype='',\
                                 fixpix='no', trimsec=_biassecred, biassec=_overscan, readaxi='line', Stdout=1)
                floyds.util.updateheader(bimg,0,{'GRISM':['blu',' blue order']})
                floyds.util.updateheader(rimg,0,{'GRISM':['red',' red order']})
                floyds.util.updateheader(bimg,0,{'arcfile':[img,'file name in the archive']})
                floyds.util.updateheader(rimg,0,{'arcfile':[img,'file name in the archive']})
                lista['blu'].append(bimg)
                lista['red'].append(rimg)
            else: 
                print 'warning type not defined'
    for arm in lista.keys():
        for img in lista[arm]:
            print img
            hdr=floyds.util.readhdr(img)
            _type=floyds.util.readkey3(hdr,'OBSTYPE')
            if _type=='EXPOSE':  
                      _type=floyds.util.readkey3(hdr,'imagetyp')
                      if not _type: _type='EXPOSE'

            if _type=='EXPOSE':  
                print 'warning obstype still EXPOSE, are this old data ?  run manually floydsfixheader'

            _slit=floyds.util.readkey3(hdr,'slit')
            _grpid=floyds.util.readkey3(hdr,'grpid')
            if _type.lower() in ['flat','f','lamp-flat','lampflat'] :
                if (arm,_slit) not in flatlist:  flatlist[arm,_slit]={}
                if _grpid not in flatlist[arm,_slit]: flatlist[arm,_slit][_grpid]=[img]
                else: flatlist[arm,_slit][_grpid].append(img)
            elif _type.lower() in ['lamp','l','arc']:
                if (arm,_slit) not in arclist:  arclist[arm,_slit]={}
                if _grpid not in arclist[arm,_slit]: arclist[arm,_slit][_grpid]=[img]
                else: arclist[arm,_slit][_grpid].append(img)
            elif _type in ['bias','b']:
                if arm not in biaslist: biaslist[arm]=[]
                biaslist[arm].append(img)
            elif _type.lower() in ['sky','s','spectrum']:
                try:
                    _ra=float(floyds.util.readkey3(hdr,'RA'))
                    _dec=float(floyds.util.readkey3(hdr,'DEC'))
                except:
                    ra00=string.split(floyds.util.readkey3(hdr,'RA'),':')
                    ra0,ra1,ra2=float(ra00[0]),float(ra00[1]),float(ra00[2])
                    _ra=((ra2/60.+ra1)/60.+ra0)*15.
                    dec00=string.split(floyds.util.readkey3(hdr,'DEC'),':')
                    dec0,dec1,dec2=float(dec00[0]),float(dec00[1]),float(dec00[2])
                    if '-' in str(dec0):       _dec=(-1)*((dec2/60.+dec1)/60.+((-1)*dec0))
                    else:                      _dec=(dec2/60.+dec1)/60.+dec0
                dd=arccos(sin(_dec*scal)*sin(decstd*scal)+cos(_dec*scal)*cos(decstd*scal)*cos((_ra-rastd)*scal))*((180/pi)*3600)
                if _verbose:
                    print _ra,_dec
                    print std[argmin(dd)],min(dd)
                if min(dd)<5200: _typeobj='std'
                else: _typeobj='obj'
                if min(dd)<5200:
                    floyds.util.updateheader(img,0,{'stdname':[std[argmin(dd)],'']})
                    floyds.util.updateheader(img,0,{'magstd':[float(magstd[argmin(dd)]),'']})
                if _typeobj not in objectlist:      objectlist[_typeobj]={}

                if (arm,_slit) not in objectlist[_typeobj]:     objectlist[_typeobj][arm,_slit]=[img]
                else: objectlist[_typeobj][arm,_slit].append(img)
    if _verbose:
        print 'object'
        print objectlist
        print 'flat'
        print flatlist
        print 'bias'
        print biaslist
        print 'arc'
        print arclist

    if liststandard and 'std' in objectlist.keys():  
        print 'external standard, raw standard not used'
        del objectlist['std']

    sens={}
    outputfile={}
    atmo={}
    for tpe in objectlist:
      if tpe not in outputfile:  outputfile[tpe]={}
      for setup in objectlist[tpe]:
        if setup not in sens:   sens[setup]=[]
        print '\n### setup= ',setup,'\n### objects= ',objectlist[tpe][setup],'\n'
        for img in objectlist[tpe][setup]:
              print '\n\n### next object= ',img,' ',floyds.util.readkey3(floyds.util.readhdr(img),'object'),'\n'
              hdr=floyds.util.readhdr(img)
              archfile=floyds.util.readkey3(hdr,'arcfile')
              _gain=floyds.util.readkey3(hdr,'gain')
              _rdnoise=floyds.util.readkey3(hdr,'ron')
              _grism=floyds.util.readkey3(hdr,'grism')
              _grpid=floyds.util.readkey3(hdr,'grpid')
              if archfile not in outputfile[tpe]: outputfile[tpe][archfile]=[]
#####################      flat   ###############
              if _listflat:   flatgood=_listflat    # flat list from reducer
              elif setup in flatlist:  
                  if _grpid in flatlist[setup]:
                      print '\n###FLAT WITH SAME GRPID'
                      flatgood= flatlist[setup][_grpid]     # flat in the  raw data
                  else:  
                      flatgood=[]
                      for _grpid0 in flatlist[setup].keys():
                          for ii in flatlist[setup][_grpid0]:
                              flatgood.append(ii)
              else: flatgood=[]
              if len(flatgood)!=0:
                  if len(flatgood)>1:
                      f=open('_oflatlist','w')
                      for fimg in flatgood:
                          print fimg
                          f.write(fimg+'\n')
                      f.close()
                      floyds.util.delete('flat'+img)
                      iraf.ccdred.flatcombine('"@_oflatlist"',output='flat'+img,combine='average',reject='none',ccdtype=' ',rdnoise=_rdnoise,gain=_gain, process='no', Stdout=1)
                      floyds.util.delete('_oflatlist')
                      flatfile='flat'+img
                  elif len(flatgood)==1:
                      os.system('cp '+flatgood[0]+' flat'+img)
                      flatfile='flat'+img
              else: flatfile=''
##########################   find arcfile            #######################
              arcfile=''
              if _listarc:       arcfile= [floyds.util.searcharc(img,_listarc)[0]][0]   # take arc from list 
              if not arcfile and setup in arclist.keys():
                    if _grpid in arclist[setup]:  
                        print '\n###ARC WITH SAME GRPID'
                        arcfile= arclist[setup][_grpid]     # flat in the  raw data
                    else:  
                        arcfile=[]
                        for _grpid0 in arclist[setup].keys():
                            for ii in arclist[setup][_grpid0]:
                                arcfile.append(ii)                   
              if arcfile:
                  if len(arcfile)>1:                           # more than one arc available
                      print arcfile
#                     _arcclose=floyds.util.searcharc(imgex,arcfile)[0]   # take the closest in time 
                      _arcclose=floyds.sortbyJD(arcfile)[-1]               #  take the last arc of the sequence
                      if _interactive.upper() in ['YES','Y']:
                              for ii in floyds.floydsspecdef.sortbyJD(arcfile):
                                  print '\n### ',ii 
                              arcfile=raw_input('\n### more than one arcfile available, which one to use ['+str(_arcclose)+'] ? ')
                              if not arcfile: arcfile=_arcclose
                      else: arcfile=_arcclose
                  else: arcfile=arcfile[0]
              else:   print '\n### Warning: no arc found'

###################################################################   rectify 
              if setup[0]=='red':
                  fcfile=floyds.__path__[0]+'/standard/ident/fcrectify_'+_tel+'_red'
                  fcfile1=floyds.__path__[0]+'/standard/ident/fcrectify1_'+_tel+'_red'
                  print fcfile
              else:
                  fcfile=floyds.__path__[0]+'/standard/ident/fcrectify_'+_tel+'_blue'
                  fcfile1=floyds.__path__[0]+'/standard/ident/fcrectify1_'+_tel+'_blue'
                  print fcfile
              print img,arcfile,flatfile
              img0=img
              if img      and img not in outputfile[tpe][archfile]: outputfile[tpe][archfile].append(img)
              if arcfile  and arcfile not in outputfile[tpe][archfile]: outputfile[tpe][archfile].append(arcfile)
              if flatfile and flatfile not in outputfile[tpe][archfile]: outputfile[tpe][archfile].append(flatfile)

              img,arcfile,flatfile=floyds.floydsspecdef.rectifyspectrum(img,arcfile,flatfile,fcfile,fcfile1,'no',_cosmic)
              if img      and img not in outputfile[tpe][archfile]: outputfile[tpe][archfile].append(img)
              if arcfile  and arcfile not in outputfile[tpe][archfile]: outputfile[tpe][archfile].append(arcfile)
              if flatfile and flatfile not in outputfile[tpe][archfile]: outputfile[tpe][archfile].append(flatfile)
###################################################################         check wavecalib  
              if tpe=='std' or floyds.util.readkey3(floyds.util.readhdr(img),'exptime') < 300:
                  if setup[0]=='red':
                      print '\n### check standard wave calib'
                      data, hdr = fits.getdata(img, 0, header=True) 
                      y=data.mean(1)
                      import numpy as np
                      if np.argmax(y) < 80 and np.argmax(y) > 15:                      
                          y2=data[np.argmax(y)-3:np.argmax(y)+3].mean(0)
                          yy2=data[np.argmax(y)-9:np.argmax(y)-3].mean(0)
                          floyds.util.delete('_std.fits')
                          fits.writeto('_std.fits', np.float32(y2-yy2), hdr)
                          shift=floyds.floydsspecdef.checkwavestd('_std.fits',_interactive,2)
                          zro=hdr['CRVAL1']
                          floyds.util.updateheader(img,0,{'CRVAL1':[zro+int(shift),'']})
                          floyds.util.updateheader(img,0,{'shift':[float(shift),'']})
                          floyds.util.delete('_std.fits')
                      else:
                          print 'object not found'
                  else: 
                      print '\n### warning: wavelength check not possible for short exposure in the blue range'
              else:
                    print '\n### check object wave calib'
                    _skyfile=floyds.__path__[0]+'/standard/ident/sky_'+setup[0]+'.fits'
                    data, hdr = fits.getdata(img, 0, header=True) 
                    y=data.mean(1)
                    import numpy as np
                    if np.argmax(y) < 80 and np.argmax(y) > 15:
                        yy1=data[10:np.argmax(y)-9].mean(0)
                        yy2=data[np.argmax(y)+9:-10].mean(0)
                        floyds.util.delete('_sky.fits')
                        fits.writeto('_sky.fits', np.float32(yy1+yy2), hdr)
                        shift=floyds.floydsspecdef.checkwavelength_obj('_sky.fits',_skyfile,_interactive,usethirdlayer=False)
                        floyds.util.delete('_sky.fits')
                        zro=hdr['CRVAL1']
                        floyds.util.updateheader(img,0,{'CRVAL1':[zro+int(shift),'']})
                        floyds.util.updateheader(img,0,{'shift':[float(shift),'']})
                    else:  print 'object not found'
####################################################     flat field
              if img and flatfile and setup[0]=='red':
                      imgn='n'+img
                      hdr1 = floyds.readhdr(img)
                      hdr2 = floyds.readhdr(flatfile)
                      _grpid1=floyds.util.readkey3(hdr1,'grpid')
                      _grpid2=floyds.util.readkey3(hdr2,'grpid')
                      if _grpid1==_grpid2:
                          print flatfile,img,setup[0]
                          imgn=floyds.fringing_classicmethod2(flatfile,img,'no','*',15,setup[0])
                      else:
                          print 'Warning: flat not from the same OB'
                          imgex=floyds.floydsspecdef.extractspectrum(img,dv,_ext_trace,_dispersionline,_interactive,tpe,automaticex=_automaticex)
                          floyds.delete('flat'+imgex)
                          iraf.specred.apsum(flatfile,output='flat'+imgex,referen=img,interac='no',find='no',recente='no',resize='no',\
                                             edit='no',trace='no',fittrac='no',extract='yes',extras='no',review='no',backgro='none')
                          fringingmask=floyds.normflat('flat'+imgex)
                          print '\n### fringing correction'
                          print imgex,fringingmask
                          imgex,scale,shift=floyds.correctfringing_auto(imgex,fringingmask)  #  automatic correction
                          shift=int(.5+float(shift)/3.5)        # shift from correctfringing_auto in Angstrom
                          print '\n##### flat scaling: ',str(scale),str(shift)
########################################################
                          datax, hdrx = fits.getdata(flatfile, 0, header=True)
                          xdim=hdrx['NAXIS1']
                          ydim=hdrx['NAXIS2']
                          iraf.specred.apedit.nsum=15 
                          iraf.specred.apedit.width=100.  
                          iraf.specred.apedit.line=1024 
                          iraf.specred.apfind.minsep=20.  
                          iraf.specred.apfind.maxsep=1000.  
                          iraf.specred.apresize.bkg='no' 
                          iraf.specred.apresize.ylevel=0.5 
                          iraf.specred.aptrace.nsum=10
                          iraf.specred.aptrace.step=10
                          iraf.specred.aptrace.nlost=10
                          floyds.util.delete('n'+flatfile)
                          floyds.util.delete('norm.fits')
                          floyds.util.delete('n'+img)
                          floyds.util.delete(re.sub('.fits','c.fits',flatfile))
                          iraf.imcopy(flatfile+'[500:'+str(xdim)+',*]',re.sub('.fits','c.fits',flatfile),verbose='no')
                          iraf.imarith(flatfile,'/',flatfile,'norm.fits',verbose='no')
                          flatfile=re.sub('.fits','c.fits',flatfile)
                          floyds.util.delete('n'+flatfile)
                          iraf.unlearn(iraf.specred.apflatten)
                          floyds.floydsspecdef.aperture(flatfile)
                          iraf.specred.apflatten(flatfile,output='n'+flatfile,interac=_interactive,find='no',recenter='no', resize='no',edit='no',trace='no',\
                                                 fittrac='no',fitspec='no', flatten='yes', aperture='',\
                                                 pfit='fit2d',clean='no',function='legendre',order=15,sample = '*', mode='ql')
                          iraf.imcopy('n'+flatfile,'norm.fits[500:'+str(xdim)+',*]',verbose='no')
                          floyds.util.delete('n'+flatfile)
                          floyds.util.delete('n'+img)
                          iraf.imrename('norm.fits','n'+flatfile,verbose='no')
                          imgn=floyds.floydsspecdef.applyflat(img,'n'+flatfile,'n'+img,scale,shift)
              else:                  imgn=''

              if imgn and imgn not in outputfile[tpe][archfile]: outputfile[tpe][archfile].append(imgn)

###################################################      2D flux calib
              hdr=floyds.util.readhdr(img)
              _sens=''
              if liststandard:  _sens=floyds.util.searchsens(img,liststandard)[0]   # search in the list from reducer
              if not _sens:
                  try:      _sens=floyds.util.searchsens(img,sens[setup])[0]        # search in the reduced data
                  except:   _sens=floyds.util.searchsens(img,'')[0]              # search in the archive
              if _sens:
                  if _sens[0]=='/': 
                      os.system('cp '+_sens+' .')
                      _sens=string.split(_sens,'/')[-1]
                  imgd=fluxcalib2d(img,_sens)
                  if imgn:     imgdn=fluxcalib2d(imgn,_sens)
                  else:        imgdn=''
                  if _sens not in outputfile[tpe][archfile]: outputfile[tpe][archfile].append(_sens)
                  print '\n### do 2D calibration'
              else:
                  imgd=''
                  imgdn=''
################    extraction         ####################################
              if imgdn:
                  try:
                      imgdnex=floyds.floydsspecdef.extractspectrum(imgdn,dv,_ext_trace,_dispersionline,_interactive,tpe,automaticex=_automaticex)
                  except Exception as e:
                      print 'failed to extract', imgdn
                      print e
                      imgdnex=''
              else:       
                  imgdnex=''
              if imgd:
                  try:
                      imgdex=floyds.floydsspecdef.extractspectrum(imgd,dv,_ext_trace,_dispersionline,_interactive,tpe,automaticex=_automaticex)  
                  except Exception as e:
                      print 'failed to extract', imgd
                      print e
                      imgdex=''
              else:
                  imgdex=''
              if imgd    and imgd    not in outputfile[tpe][archfile]: outputfile[tpe][archfile].append(imgd)
              if imgdn   and imgdn   not in outputfile[tpe][archfile]: outputfile[tpe][archfile].append(imgdn)
              if imgdnex and imgdnex not in outputfile[tpe][archfile]: outputfile[tpe][archfile].append(imgdnex)
              if imgdex  and imgdex  not in outputfile[tpe][archfile]: outputfile[tpe][archfile].append(imgdex)
              if tpe=='std':
                  if imgn:
                      try:
                          imgnex=floyds.floydsspecdef.extractspectrum(imgn,dv,_ext_trace,_dispersionline,_interactive,tpe,automaticex=_automaticex)  
                      except Exception as e:
                          print 'failed to extract', imgn
                          print e
                          imgnex=''
                  elif img:
                      try:
                          imgnex=floyds.floydsspecdef.extractspectrum(img,dv,_ext_trace,_dispersionline,_interactive,tpe,automaticex=_automaticex)  
                      except Exception as e:
                          print 'failed to extract', img
                          print e
                          imgnex=''
                  if imgnex:
                    hdrs=floyds.util.readhdr(imgnex)
                    _tel=floyds.util.readkey3(hdrs,'TELID')
                    try:
                      _outputsens2='sens_'+_tel+'_'+str(floyds.util.readkey3(hdrs,'date-night'))+'_'+str(floyds.util.readkey3(hdrs,'grism'))+\
                          '_'+re.sub('.dat','',floyds.util.readkey3(hdrs,'stdname'))+'_'+str(MJDtoday)
                    except:  sys.exit('Error: missing header -stdname- in standard '+str(standardfile)+'  ')                          
                    print '\n### compute sensitivity function and atmofile'
                    if setup[0]=='red':
                          atmofile=floyds.floydsspecdef.telluric_atmo(imgnex)
                          if atmofile and atmofile not in outputfile[tpe][archfile]:    outputfile[tpe][archfile].append(atmofile)
                          stdusedclean=re.sub('_ex.fits','_clean.fits',imgnex)
                          floyds.util.delete(stdusedclean)
                          _function='spline3'
                          iraf.specred.sarith(input1=imgnex,op='/',input2=atmofile,output=stdusedclean, format='multispec')
                          try:
                              _outputsens2=floyds.floydsspecdef.sensfunction(stdusedclean,_outputsens2,_function,8,_interactive)
                          except:
                              print 'Warning: problem computing sensitivity function'
                              _outputsens2=''
                          if setup not in atmo: atmo[setup]=[atmofile]
                          else: atmo[setup].append(atmofile)
                    else:
                          _function='spline3'
                          try:
                              _outputsens2=floyds.floydsspecdef.sensfunction(imgnex,_outputsens2,_function,12,_interactive,'3400:4700')#,3600:4300')
                          except:
                              print 'Warning: problem computing sensitivity function'
                              _outputsens2=''
                    if _outputsens2  and _outputsens2 not in outputfile[tpe][archfile]:    outputfile[tpe][archfile].append(_outputsens2)
    ###################################################
    if 'obj' in outputfile:
      for imm in outputfile['obj']:
        lista = []
        tt_red = ''
        ntt_red = ''
        tt_blue = ''
        for f in outputfile['obj'][imm]:
            if '_ex.fits' in f and '_blue_' in f:
                tt_blue = f
            elif '_ex.fits' in f and f[:3] == 'ntt':
                ntt_red = f
            elif '_ex.fits' in f and f[:2] == 'tt':
                tt_red = f
            else:
                lista.append(f)
        merged = ntt_red.replace('_red_', '_merge_')
        if tt_blue and ntt_red:
            floyds.floydsspecdef.combspec2(tt_blue, ntt_red, merged, scale=True, num=None)
        if os.path.isfile(merged):
            lista.append(merged)
            floyds.util.delete(tt_blue)
            floyds.util.delete(tt_red)
            floyds.util.delete(ntt_red)
        else:
            if tt_blue: lista.append(tt_blue)
            if tt_red:  lista.append(tt_red)
            if ntt_red: lista.append(ntt_red)
        outputfile['obj'][imm] = lista
    readme=floyds.floydsspecauto.writereadme()
    return outputfile,readme
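
# --- Hedged sketch, not part of the original pipeline ------------------------
# The merging block above groups the extracted spectra purely by filename
# convention: '_ex.fits' containing '_blue_' is the blue arm, an 'ntt' prefix
# marks the fringing-corrected red arm, a bare 'tt' prefix the uncorrected one.
# A minimal standalone version of that classification (helper name and file
# names are illustrative only):
def classify_extracted(files):
    tt_blue = tt_red = ntt_red = ''
    other = []
    for f in files:
        if '_ex.fits' in f and '_blue_' in f:
            tt_blue = f
        elif '_ex.fits' in f and f.startswith('ntt'):
            ntt_red = f
        elif '_ex.fits' in f and f.startswith('tt'):
            tt_red = f
        else:
            other.append(f)
    return tt_blue, tt_red, ntt_red, other
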
Example #42
0
File: kaitutils.py Project: cenko/python
from pyraf import iraf
import copy, os, shutil, glob, sys, string, re, math, operator, time
import pyfits
from types import *
from mx.DateTime import *

from iqpkg import *
import ephem

# Necessary packages
iraf.images()
iraf.immatch()
iraf.imfilter()
iraf.noao()
iraf.imred()
iraf.ccdred()
iraf.digiphot()
iraf.apphot()

yes=iraf.yes
no=iraf.no
INDEF=iraf.INDEF
hedit=iraf.hedit
imgets=iraf.imgets
imcombine=iraf.imcombine

pyrafdir="python/pyraf/"
pyrafdir_key='PYRAFPARS'

if os.environ.has_key(pyrafdir_key):
    pardir=os.environ[pyrafdir_key]
Example #43
0
def combine_lamp(lstfile):
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    iraf.imcombine(input='%ftbo%ftbo%@' + lstfile,
                   output='Lamp', combine='sum', reject='none')
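
# --- Hedged sketch (assumption, not from the original source) ----------------
# combine_lamp above hands IRAF an image template: '@' + lstfile expands to the
# names listed in lstfile, and the leading '%ftbo%ftbo%' appears to be IRAF's
# '%old%new%' per-name substitution (here replacing 'ftbo' with itself, i.e.
# effectively a no-op). An equivalent explicit call, assuming the list already
# holds the processed frame names:
from pyraf import iraf

def combine_lamp_simple(lstfile, output='Lamp'):
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    iraf.imcombine(input='@' + lstfile, output=output,
                   combine='sum', reject='none')
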
def reduce(imglist, files_arc, files_flat, _cosmic, _interactive_extraction,
           _arc):
    import string
    import os
    import re
    import sys
    import pdb
    os.environ["PYRAF_BETA_STATUS"] = "1"
    try:
        from astropy.io import fits as pyfits
    except:
        import pyfits
    import numpy as np
    import util
    import instruments
    import combine_sides as cs
    import cosmics
    from pyraf import iraf

    dv = util.dvex()
    scal = np.pi / 180.

    if not _interactive_extraction:
        _interactive = False
    else:
        _interactive = True

    if not _arc:
        _arc_identify = False
    else:
        _arc_identify = True

    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    iraf.twodspec(_doprint=0)
    iraf.longslit(_doprint=0)
    iraf.onedspec(_doprint=0)
    iraf.specred(_doprint=0)
    iraf.disp(inlist='1', reference='1')

    toforget = [
        'ccdproc', 'imcopy', 'specred.apall', 'longslit.identify',
        'longslit.reidentify', 'specred.standard', 'longslit.fitcoords',
        'onedspec.wspectext'
    ]
    for t in toforget:
        iraf.unlearn(t)
    iraf.ccdred.verbose = 'no'
    iraf.specred.verbose = 'no'
    iraf.ccdproc.darkcor = 'no'
    iraf.ccdproc.fixpix = 'no'
    iraf.ccdproc.flatcor = 'no'
    iraf.ccdproc.zerocor = 'no'
    iraf.ccdproc.ccdtype = ''

    iraf.longslit.mode = 'h'
    iraf.specred.mode = 'h'
    iraf.noao.mode = 'h'
    iraf.ccdred.instrument = "ccddb$kpno/camera.dat"

    list_arc_b = []
    list_arc_r = []

    for arcs in files_arc:
        hdr = util.readhdr(arcs)
        br, inst = instruments.blue_or_red(arcs)

        if br == 'blue':
            list_arc_b.append(arcs)
        elif br == 'red':
            list_arc_r.append(arcs)
        else:
            errStr = '{} '.format(str(util.readkey3(hdr, 'VERSION')))
            errStr += 'not in database'
            print(errStr)
            sys.exit()

    asci_files = []
    newlist = [[], []]

    print('\n### images to reduce :', imglist)
    #raise TypeError
    for img in imglist:
        if 'b' in img:
            newlist[0].append(img)
        elif 'r' in img:
            newlist[1].append(img)

    if len(newlist[1]) < 1:
        newlist = newlist[:-1]
    elif len(newlist[0]) < 1:
        newlist = newlist[1:]
    else:
        sides = raw_input("Reduce which side? ([both]/b/r): ")
        if sides == 'b':
            newlist = newlist[:-1]
        elif sides == 'r':
            newlist = newlist[1:]

    for imgs in newlist:
        hdr = util.readhdr(imgs[0])
        br, inst = instruments.blue_or_red(imgs[0])
        if br == 'blue':
            flat_file = '../RESP_blue'
        elif br == 'red':
            flat_file = '../RESP_red'
        else:
            errStr = 'Not in instrument list'
            print(errStr)
            sys.exit()

        iraf.specred.dispaxi = inst.get('dispaxis')
        iraf.longslit.dispaxi = inst.get('dispaxis')

        _gain = inst.get('gain')
        _ron = inst.get('read_noise')
        iraf.specred.apall.readnoi = _ron
        iraf.specred.apall.gain = _gain

        _object0 = util.readkey3(hdr, 'OBJECT')
        _date0 = util.readkey3(hdr, 'DATE-OBS')

        _object0 = re.sub(' ', '', _object0)
        _object0 = re.sub('/', '_', _object0)
        nameout0 = str(_object0) + '_' + inst.get('name') + '_' + str(_date0)

        nameout0 = util.name_duplicate(imgs[0], nameout0, '')
        timg = nameout0
        print('\n### now processing :', timg, ' for -> ', inst.get('name'))
        if len(imgs) > 1:
            img_str = ','.join(imgs)
            iraf.imcombine(img_str, output=timg)
        else:
            img = imgs[0]
            if os.path.isfile(timg):
                os.system('rm -rf ' + timg)
            iraf.imcopy(img, output=timg)

        # should just do this by hand
        iraf.ccdproc(timg,
                     output='',
                     overscan='no',
                     trim='no',
                     zerocor="no",
                     flatcor="yes",
                     readaxi='line',
                     flat=flat_file,
                     Stdout=1)

        img = timg

        #raw_input("Press Enter to continue...")
        if _cosmic:
            print('\n### starting cosmic removal')

            array, header = cosmics.fromfits(img)
            c = cosmics.cosmicsimage(array,
                                     gain=inst.get('gain'),
                                     readnoise=inst.get('read_noise'),
                                     sigclip=5,
                                     sigfrac=0.5,
                                     objlim=2.0)
            c.run(maxiter=5)
            cosmics.tofits('cosmic_' + img, c.cleanarray, header)
            img = 'cosmic_' + img

            print('\n### cosmic removal finished')
        else:
            print(
                '\n### No cosmic removal, saving normalized image for inspection???'
            )

        if inst.get('arm') == 'blue' and len(list_arc_b) > 0:
            arcfile = list_arc_b[0]
        elif inst.get('arm') == 'red' and len(list_arc_r) > 0:
            arcfile = list_arc_r[0]
        else:
            arcfile = None

        if arcfile is not None and not arcfile.endswith(".fits"):
            arcfile = arcfile + '.fits'

        if not os.path.isdir('database/'):
            os.mkdir('database/')

        if _arc_identify:
            os.system('cp ' + arcfile + ' .')
            arcfile = string.split(arcfile, '/')[-1]
            arc_ex = re.sub('.fits', '.ms.fits', arcfile)

            arcref = inst.get('archive_arc_extracted')
            arcref_img = string.split(arcref, '/')[-1]
            arcref_img = arcref_img.replace('.ms.fits', '')
            arcrefid = inst.get('archive_arc_extracted_id')
            os.system('cp ' + arcref + ' .')
            arcref = string.split(arcref, '/')[-1]
            os.system('cp ' + arcrefid + ' ./database')

            aperture = inst.get('archive_arc_aperture')
            os.system('cp ' + aperture + ' ./database')

            print('\n###  arcfile : ', arcfile)
            print('\n###  arcfile extraction : ', arc_ex)
            print('\n###  arc reference : ', arcref)

            # read for some meta data to get the row right
            tmpHDU = pyfits.open(arcfile)
            header = tmpHDU[0].header
            try:
                spatialBin = int(header['binning'].split(',')[0])
            except KeyError:
                spatialBin = 1
            apLine = 700 // spatialBin

            iraf.specred.apall(arcfile,
                               output=arc_ex,
                               ref=arcref_img,
                               line=apLine,
                               nsum=10,
                               interactive='no',
                               extract='yes',
                               find='yes',
                               nfind=1,
                               format='multispec',
                               trace='no',
                               back='no',
                               recen='no')

            iraf.longslit.reidentify(referenc=arcref,
                                     images=arc_ex,
                                     interac='NO',
                                     section=inst.get('section'),
                                     coordli=inst.get('line_list'),
                                     shift='INDEF',
                                     search='INDEF',
                                     mode='h',
                                     verbose='YES',
                                     step=0,
                                     nsum=5,
                                     nlost=2,
                                     cradius=10,
                                     refit='yes',
                                     overrid='yes',
                                     newaps='no')

        print('\n### extraction using apall')
        result = []
        hdr_image = util.readhdr(img)
        _type = util.readkey3(hdr_image, 'object')

        if (_type.startswith("arc") or _type.startswith("dflat")
                or _type.startswith("Dflat") or _type.startswith("Dbias")
                or _type.startswith("Bias")):
            print('\n### warning problem \n exit ')
            sys.exit()
        else:
            imgex = util.extractspectrum(img, dv, inst, _interactive, 'obj')
            print('\n### applying wavelength solution')
            print(arc_ex)
            iraf.disp(inlist=imgex, reference=arc_ex)

        result = result + [imgex] + [timg]

        # asci_files.append(imgasci)
        if not os.path.isdir(_object0 + '_ex/'):
            os.mkdir(_object0 + '_ex/')

        if not _arc_identify:
            util.delete(arcref)
        else:
            util.delete(arcfile)

        util.delete(arc_ex)
        util.delete(img)
        util.delete(imgex)
        util.delete(arcref)
        util.delete('logfile')
        #if _cosmic:
        #util.delete(img[7:])
        #util.delete("cosmic_*")

        os.system('mv ' + 'd' + imgex + ' ' + _object0 + '_ex/')

        use_sens = raw_input('Use archival flux calibration? [y]/n ')
        if use_sens != 'no':
            sensfile = inst.get('archive_sens')
            os.system('cp ' + sensfile + ' ' + _object0 + '_ex/')
            bstarfile = inst.get('archive_bstar')
            os.system('cp ' + bstarfile + ' ' + _object0 + '_ex/')

    return result
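
# --- Hedged sketch (not from the original source) ----------------------------
# The apall extraction row above is the nominal unbinned row (700) divided by
# the spatial binning read from the arc header; as in the code above, the first
# comma-separated value of the 'binning' keyword is taken as the spatial bin.
# Helper name is illustrative only.
def aperture_line(arc_header, nominal_row=700):
    try:
        spatial_bin = int(arc_header['binning'].split(',')[0])
    except KeyError:
        spatial_bin = 1
    return nominal_row // spatial_bin
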
Example #45
0
import copy, os, shutil, glob, sys, string, re, math
import getopt
import pyfits
#import numarray
import numpy
import time
from types import *

# Local packages
import add_wcs
import iqpkg
from iqutils import *

# Necessary IRAF packages
iraf.imred()
iraf.ccdred()
iraf.images()
iraf.imutil()
iraf.imfit()
iraf.proto()

# Shortcuts
yes=iraf.yes
no=iraf.no
INDEF=iraf.INDEF
hedit=iraf.hedit
imgets=iraf.imgets

# Pyraf parameters: Where to find them
pyrafdir="python/pyraf/"
pyrafdir_key='PYRAFPARS'
Example #46
0
File: rcred.py Project: nblago/kpy
def create_masterflat(flatdir=None, biasdir=None, channel='rc'):
    '''
    Creates a masterflat from both dome flats and sky flats if the number of counts in the given filter
    is not saturated and not too low (between 1500 and 40000). 
    '''
    
    
    if (flatdir == None or flatdir==""): flatdir = "."
        
    if (biasdir == None or biasdir==""): biasdir = "."
        
    os.chdir(flatdir)
    
    if (len(glob.glob("Flat_%s*norm.fits"%channel)) == 4):
        print "Master Flat exists!"
        return 
    else:
        print "Starting the Master Flat creation!"

    bias_slow = "Bias_%s_slow.fits"%channel
    bias_fast = "Bias_%s_fast.fits"%channel
    
    if (not os.path.isfile(bias_slow) and not os.path.isfile(bias_fast) ):
        create_masterbias(biasdir)
     
    lsflat = []
    lfflat = []
    
    #Select all files that are flats taken with the same channel
    for f in glob.glob("*fits"):
        #try:
        if fitsutils.has_par(f, "OBJECT"):
            obj = str.upper(fitsutils.get_par(f, "OBJECT"))
        else:
            continue
        
        if ( ("DOME" in  obj or "FLAT" in obj) and (channel == fitsutils.get_par(f, "CHANNEL"))):
            if (fitsutils.get_par(f, "ADCSPEED")==2):
                lfflat.append(f)
            else:
                lsflat.append(f)
        #except:
        #    print "Error with retrieving parameters for file", f
        #    pass
                
    print "Files for slow flat", lsflat
    print "Files for fast flat", lfflat
    
    fsfile ="lflat_slow_"+channel
    np.savetxt(fsfile, np.array(lsflat), fmt="%s")
    fffile ="lflat_fast_"+channel
    np.savetxt(fffile, np.array(lfflat), fmt="%s")



    # Running IRAF
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    
    #Remove bias from the flat
    if len(lsflat) >0:
        iraf.imarith("@"+fsfile, "-", bias_slow, "b_@"+fsfile)
    
    if len(lfflat) >0:
        iraf.imarith("@"+fffile, "-", bias_fast, "b_@"+fffile)    
    
    #Slices the flats.
    debiased_flats = glob.glob("b_*.fits")
    for f in debiased_flats:
        print "Slicing file", f
        slice_rc(f)
        #Remove the un-sliced file
        os.remove(f)
        
    #Selects the ones that are suitable given the number of counts and combines them.
    bands = ['u', 'g', 'r', 'i']
    for b in bands:
        out = "Flat_%s_%s.fits"%(channel, b)
        out_norm = out.replace(".fits","_norm.fits")

        if (os.path.isfile(out_norm)):
            print "Master Flat for filter %s exists. Skipping..."%b
            continue
        
        lfiles = []
        for f in glob.glob('b_*_%s.fits'%b):
            d = pf.open(f)[0].data
            if np.percentile(d, 90)>1500 and np.percentile(d, 90)<40000:
                lfiles.append(f)

        if len(lfiles) == 0:
            print "WARNING!!! Could not find suitable flats for band %s"%b
            continue
        ffile ="lflat_"+b
        np.savetxt(ffile, np.array(lfiles), fmt="%s")
    
        
        #Cleaning of old files
        if(os.path.isfile(out)): os.remove(out)
        if(os.path.isfile(out_norm)): os.remove(out_norm)
        if(os.path.isfile("Flat_stats")): os.remove("Flat_stats")
        
        
        #Combine flats
        iraf.imcombine(input = "@"+ffile, \
                        output = out, \
                        combine = "median",\
                        scale = "mode",
                        weight = "exposure")
        iraf.imstat(out, fields="image,npix,mean,stddev,min,max,mode", Stdout="Flat_stats")
        st = np.genfromtxt("Flat_stats", names=True, dtype=None)
        #Normalize flats
        iraf.imarith(out, "/", st["MODE"], out_norm)
        
        #Do some cleaning
        print 'Removing from lfiles'
        for f in glob.glob('b_*_%s.fits'%b):
            os.remove(f)

        os.remove(ffile)
        
        
        if os.path.isfile(fsfile):
            os.remove(fsfile)
        if os.path.isfile(fffile):
            os.remove(fffile)
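
# --- Hedged sketch (not from the original source) ----------------------------
# The flat-selection criterion described in the create_masterflat docstring:
# keep a debiased flat only if its 90th-percentile count level lies between
# ~1500 and ~40000 counts (well exposed but not saturated). The file pattern
# and helper name are illustrative only.
import glob
import numpy as np
from astropy.io import fits

def select_good_flats(pattern='b_*_r.fits', lo=1500, hi=40000):
    good = []
    for fname in glob.glob(pattern):
        data = fits.getdata(fname)
        p90 = np.percentile(data, 90)
        if lo < p90 < hi:
            good.append(fname)
    return good
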
Example #47
0
def sofispecreduction(files, _interactive, _doflat, listflat, _docross, _verbose=False):
    # print "LOGX:: Entering `sofispecreduction` method/function in
    # %(__file__)s" % globals()
    import ntt
    from ntt.util import delete, readhdr, readkey3, correctcard, rangedata
    import string, re, sys, os, glob

    try:        
        from astropy.io import fits as pyfits
    except:     
        import pyfits

    from pyraf import iraf
    from numpy import argmin, array, min, isnan, arange, mean, sum
    from numpy import sqrt, pi

    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    iraf.twodspec(_doprint=0)
    iraf.longslit(_doprint=0)
    iraf.specred(_doprint=0)
    toforget = ['ccdred.flatcombine', 'ccdproc', 'specred.apall', 'longslit.identify', 'longslit.reidentify',
                'longslit.fitcoords', 'specred.transform', 'specred.response', 'imutil.hedit']
    for t in toforget:
        iraf.unlearn(t)
    iraf.longslit.dispaxi = 2
    iraf.longslit.mode = 'h'
    iraf.specred.dispaxi = 2
    iraf.specred.mode = 'h'
    iraf.ccdproc.darkcor = 'no'
    iraf.ccdproc.fixpix = 'no'
    iraf.ccdproc.flatcor = 'no'
    iraf.ccdproc.zerocor = 'no'
    iraf.ccdproc.overscan = 'no'
    iraf.ccdproc.ccdtype = ''
    iraf.ccdred.instrument = "/dev/null"

    iraf.set(direc=ntt.__path__[0] + '/')

    if _interactive:
        _interact = 'yes'
    else:
        _interact = 'no'
    if _verbose:
        iraf.ccdred.verbose = 'yes'
        iraf.specred.verbose = 'yes'
    else:
        iraf.specred.verbose = 'no'
        iraf.ccdred.verbose = 'no'
    import datetime
    import time

    now = datetime.datetime.now()
    datenow = now.strftime('20%y%m%d%H%M')
    MJDtoday = 55927 + (datetime.date.today() - datetime.date(2012, 01, 01)).days
    # if they are not sorted the fieldlist dict could crash
    files = ntt.sofiphotredudef.sortbyJD(files)
    outputlist = []
    setup = []
    fieldlist = {}
    OBID = {}
    RA = {}
    DEC = {}
    objects = {}
    flats = {}
    lamps1 = {}
    _rdnoise = readkey3(readhdr(re.sub('\n', '', files[0])), 'ron')
    _gain = readkey3(readhdr(re.sub('\n', '', files[0])), 'gain')
    for img in files:
        img = re.sub('\n', '', img)
        hdr = readhdr(img)
        _object = readkey3(hdr, 'object')
        _filter = readkey3(hdr, 'filter')
        _date = readkey3(hdr, 'date-night')
        _exptime = readkey3(hdr, 'exptime')
        _grism = readkey3(hdr, 'grism')
        _obsmode = readkey3(hdr, 'obsmode')
        _type = ''
        if _grism.lower() not in ['gr', 'gb']:
            _type = 'image'
        if not _type:
            if _object.lower() == 'flat':
                _type = 'flat'
                if _date not in flats:
                    flats[_date] = {}
                if _grism not in flats[_date]:
                    flats[_date][_grism] = [img]
                else:
                    flats[_date][_grism].append(img)
            elif _object.lower() == 'lamp':
                _lampid = (readkey3(hdr, 'esoid'), readkey3(hdr, 'grism'))
                if _lampid not in lamps1:
                    lamps1[_lampid] = [None, None]
                if readkey3(hdr, 'lamp1') == 'Xenon':
                    lamps1[_lampid][0] = img
                else:
                    lamps1[_lampid][1] = img
                _type = 'lamp'
                # if readkey3(hdr,'lamp1')=='Xenon':
            #                     _type='lamp'
            #                     if _grism not in lamps:
            #                         lamps[_grism]=[img]
            #                     else:
            #                         lamps[_grism].append(img)
            #                 else:
            #                     _type='notgood'
        if not _type:
            _ra = readkey3(hdr, 'RA')
            _dec = readkey3(hdr, 'DEC')
            _object_name = readkey3(hdr, 'object')
            _OBID = (readkey3(hdr, 'esoid'), _grism)
            if string.count(_object_name, '/') or string.count(_object_name, '.') or string.count(_object_name, ' '):
                nameobj = string.split(_object_name, '/')[0]
                nameobj = string.split(nameobj, ' ')[0]
                nameobj = string.split(nameobj, '.')[0]
            else:
                nameobj = _object_name
            if _grism not in fieldlist:
                fieldlist[_grism] = {}
            if _OBID not in OBID:
                count = 1
                nameobj0 = nameobj + '_' + str(count)
                answ = 'yes'
                while answ == 'yes':
                    if nameobj0 in fieldlist[_grism]:
                        count = count + 1
                        nameobj0 = nameobj + '_' + str(count)
                    else:
                        answ = 'no'
                fieldlist[_grism][nameobj0] = []
                OBID[readkey3(hdr, 'esoid'), _grism] = nameobj0
            fieldlist[_grism][nameobj0].append(img)

        if _verbose:
            print img
            print _type, _object, _filter
            print 'lamps', lamps1

    lamps = {}
    for _lampid in lamps1:
        lamp = ''
        output = 'arc_' + str(_lampid[0]) + '_' + str(_lampid[1]) + '.fits'
        if lamps1[_lampid][0] and lamps1[_lampid][1]:
            print lamps1[_lampid][0], lamps1[_lampid][1]
            # try:
            ntt.util.delete(output)
            iraf.imarith(lamps1[_lampid][0], '-', lamps1[_lampid][1],
                         result=output, verbose='yes')
            #            except:
            #                print 'warning, lamp file not ON/OFF'
            #                os.system('cp '+lamps1[_lampid][0]+' '+output)

            lamp = output
        elif lamps1[_lampid][0] and not lamps1[_lampid][1]:
            os.system('cp ' + lamps1[_lampid][0] + ' ' + output)
            lamp = output
        if lamp:
            if _lampid[1] not in lamps:
                lamps[_lampid[1]] = [lamp]
            else:
                lamps[_lampid[1]].append(lamp)

    if _verbose:
        print '\n### FIELDS\n', fieldlist
        print '\n### OBID\n', OBID
        print '\n### FLATS\n', flats
        print '\n### LAMPS\n', lamps

#    if not flats:
#        sys.exit('\n### error: spectroscopic flat not available, add flats in the directory and try again')
#    if not lamps:
# sys.exit('\n### error: spectroscopic lamp not available, add lamps in
# the directory and try again')

    if not listflat:
        print '\n### list of available spectroscopic flats (ON,OFF):'
        for _date in flats:
            for _grism in flats[_date]:
                for img in flats[_date][_grism]:
                    if pyfits.open(img)[0].data.mean() >= 2000:
                        print img, _grism, _date, 'ON ? '
                    else:
                        print img, _grism, _date, 'OFF ? '
        for _date in flats:
            for _grism in flats[_date]:
                flat = {'ON': [], 'OFF': []}
                for img in flats[_date][_grism]:
                    _type = ''
                    if readkey3(readhdr(img), 'lamp3'):
                        print '\n### header lamp3 found: flat ON ', str(img)
                        _type = 'ON'
                    else:
                        if pyfits.open(img)[0].data.mean() >= 2000:
                            _type = 'ON'
                        else:
                            _type = 'OFF'
                    aa, bb, cc = ntt.util.display_image(img, 1, '', '', False)
                    print '\n### number of flat already selected (ON,OFF): \n ### please select same number ' \
                          'of ON and OFF flats \n' + \
                        str(len(flat['ON'])) + '  ' + str(len(flat['OFF']))
                    print '\n### image ' + str(img)
                    answ = raw_input(
                        'ON[n]/OFF[f]/reject[r]/stop[s] [' + str(_type) + '] ? ')
                    if not answ:
                        answ = _type
                    if answ in ['ON', 'on', 'n']:
                        _type = 'ON'
                    if answ in ['OFF', 'off', 'f']:
                        _type = 'OFF'
                    if answ in ['s', 'S', 'STOP', 'stop', 'Stop']:
                        _type = 'stop'
                    if answ in ['r', 'R', 'reject']:
                        _type = 'r'
                    if _type in ['ON', 'OFF']:
                        flat[_type].append(img)
                    elif _type == 'stop':
                        if len(flat['ON']) == len(flat['OFF']) and len(flat['OFF']) >= 2:
                            break
                        elif len(flat['ON']) == len(flat['OFF']) and len(flat['OFF']) == 0:
                            break
                        else:
                            print '\n### Warning: you can stop only if the numbers of ON and OFF are the same'
                print len(flat['ON']), len(flat['OFF'])
                if len(flat['ON']) == len(flat['OFF']) and len(flat['OFF']) >= 2:
                    ff = open('_flatlist', 'w')
                    for ii in range(0, len(flat['OFF'])):
                        delete('flat_' + str(_date) + '_' + str(_grism) +
                               '_' + str(MJDtoday) + '_' + str(ii) + '.fits')
                        iraf.imarith(flat['ON'][ii], '-', flat['OFF'][ii],
                                     result='flat_' + str(_date) + '_' + str(_grism) + '_' + str(MJDtoday) + '_' + str(
                                         ii) + '.fits', verbose='no')
                        ff.write(
                            'flat_' + str(_date) + '_' + str(_grism) + '_' + str(MJDtoday) + '_' + str(ii) + '.fits\n')
                    ff.close()
                    masterflat = 'flat_' + \
                        str(_date) + '_' + str(_grism) + \
                        '_' + str(MJDtoday) + '.fits'
                    delete(masterflat)
                    _order = '80'
                    iraf.ccdred.flatcombine(input='@_flatlist', output=masterflat, combine='median', rdnoise=_rdnoise,
                                            gain=_gain, ccdtype='')
                    hdr = readhdr(masterflat)
                    matching = [s for s in hdr.keys() if "IMCMB" in s]
                    for imcmb in matching:
                        aaa = iraf.hedit(masterflat, imcmb, delete='yes', update='yes',
                                         verify='no', Stdout=1)
                    delete('_flatlist')
                    print masterflat
                    correctcard(masterflat)
                    if masterflat not in outputlist:
                        outputlist.append(masterflat)
                    ntt.util.updateheader(masterflat, 0, {'FILETYPE': [41102, 'flat field'],
                                                          'SINGLEXP': [False, 'TRUE if resulting from single exposure'],
                                                          'M_EPOCH': [False, 'TRUE if resulting from multiple epochs']})

                    print '\n###  master flat ........... done '
                    delete('n' + masterflat)
                    iraf.specred.response(masterflat, normaliz=masterflat + '[100:900,*]',
                                          response='n' + masterflat, interac=_interact, thresho='INDEF', sample='*',
                                          naverage=2,
                                          function='spline3', low_rej=3, high_rej=3, order=_order, niterat=20, grow=0,
                                          graphic='stdgraph', mode='q')
                    listflat.append('n' + masterflat)
                    if 'n' + masterflat not in outputlist:
                        outputlist.append('n' + masterflat)
                    ntt.util.updateheader('n' + masterflat, 0, {'FILETYPE': [41203, 'normalized flat field'],
                                                                'TRACE1': [masterflat, 'Originating file']})
                    # ntt.util.updateheader('n'+masterflat,0,{'TRACE1':[masterflat,'']})

                    flattot = flat['ON'] + flat['OFF']
                    num = 0
                    for img in flattot:
                        num = num + 1
                        ntt.util.updateheader(masterflat, 0, {
                            'PROV' + str(num): [readkey3(readhdr(img), 'ARCFILE'), 'Originating file'],
                            'TRACE' + str(num): [readkey3(readhdr(img), 'ARCFILE'), 'Originating file']})
                        ntt.util.updateheader('n' + masterflat, 0, {
                            'PROV' + str(num): [readkey3(readhdr(img), 'ARCFILE'), 'Originating file']})

                    if listflat:
                        print '\n### flat available:\n### ' + str(listflat), '\n'
                elif len(flat['ON']) == len(flat['OFF']) and len(flat['OFF']) == 0:
                    print '\n### no good flats in this set ......'
                else:
                    sys.exit('\n### Error: number of ON and OFF not the same')

    for _grism in fieldlist:
        obj0 = fieldlist[_grism][fieldlist[_grism].keys()[0]][0]
        # #############              arc              #########################
        if _grism not in lamps:
            print '\n### take arc from archive '
            arcfile = ntt.util.searcharc(obj0, '')[0]
            if arcfile[0] == '/':
                os.system('cp ' + arcfile + ' ' +
                          string.split(arcfile, '/')[-1])
                arcfile = string.split(arcfile, '/')[-1]
            lamps[_grism] = [arcfile]

        if _grism in lamps:
            arclist = lamps[_grism]
            if arclist:
                arcfile = ntt.util.searcharc(obj0, arclist)[0]
            else:
                arcfile = ntt.util.searcharc(obj0, '')[0]

            print arcfile
            if arcfile:
                print arcfile
                datea = readkey3(readhdr(arcfile), 'date-night')
                if arcfile[0] == '/':
                    os.system('cp ' + arcfile + ' ' +
                              string.split(arcfile, '/')[-1])
                    arcfile = string.split(arcfile, '/')[-1]

                if _doflat:
                    if listflat:
                        flat0 = ntt.util.searchflat(arcfile, listflat)[0]
                    else:
                        flat0 = ''
                else:
                    flat0 = ''

                if flat0:
                    _flatcor = 'yes'
                else:
                    _flatcor = 'no'
                    _doflat = False

                ntt.util.delete('arc_' + datea + '_' + _grism +
                                '_' + str(MJDtoday) + '.fits')

                print arcfile, flat0, _flatcor, _doflat

                if _doflat:
                    iraf.noao.imred.ccdred.ccdproc(arcfile,
                                                   output='arc_' + datea + '_' + _grism +
                                                   '_' +
                                                   str(MJDtoday) + '.fits',
                                                   overscan='no', trim='no', zerocor='no', flatcor=_flatcor, flat=flat0)
                else:
                    os.system('cp ' + arcfile + ' ' + 'arc_' + datea +
                              '_' + _grism + '_' + str(MJDtoday) + '.fits')

                iraf.noao.imred.ccdred.ccdproc('arc_' + datea + '_' + _grism + '_' + str(MJDtoday) + '.fits', output='',
                                               overscan='no', trim='yes', zerocor='no', flatcor='no', flat='',
                                               trimsec='[30:1000,1:1024]')

                arcfile = 'arc_' + datea + '_' + \
                    _grism + '_' + str(MJDtoday) + '.fits'

                ntt.util.correctcard(arcfile)
                print arcfile

                if arcfile not in outputlist:
                    outputlist.append(arcfile)

                ntt.util.updateheader(arcfile, 0, {'FILETYPE': [41104, 'pre-reduced 2D arc'],
                                                   'SINGLEXP': [True, 'TRUE if resulting from single exposure'],
                                                   'M_EPOCH': [False, 'TRUE if resulting from multiple epochs'],
                                                   'PROV1': [readkey3(readhdr(arcfile), 'ARCFILE'), 'Originating file'],
                                                   'TRACE1': [readkey3(readhdr(arcfile), 'ARCFILE'),
                                                              'Originating file']})

                arcref = ntt.util.searcharc(obj0, '')[0]
                if not arcref:
                    identific = iraf.longslit.identify(images=arcfile, section='column 10',
                                                       coordli='direc$standard/ident/Lines_XeAr_SOFI.dat', nsum=10,
                                                       fwidth=7, order=3, mode='h', Stdout=1, verbose='yes')
                else:
                    print arcref
                    os.system('cp ' + arcref + ' .')
                    arcref = string.split(arcref, '/')[-1]
                    if not os.path.isdir('database/'):
                        os.mkdir('database/')
                    if os.path.isfile(ntt.util.searcharc(obj0, '')[1] + '/database/id' + re.sub('.fits', '', arcref)):
                        os.system('cp ' + ntt.util.searcharc(obj0, '')[1] + '/database/id' + re.sub('.fits', '',
                                                                                                    arcref) + ' database/')

                    print arcref, arcfile
                    #                        time.sleep(5)
                    #                        os.system('rm -rf database/idarc_20130417_GR_56975')
                    #                        raw_input('ddd')
                    identific = iraf.longslit.reidentify(referenc=arcref, images=arcfile, interac='NO',  # _interact,
                                                         section='column 10', shift=0.0,
                                                         coordli='direc$standard/ident/Lines_XeAr_SOFI.dat',
                                                         overrid='yes', step=0, newaps='no', nsum=5, nlost=2,
                                                         mode='h', verbose='yes', Stdout=1)
                    #                        print identific
                    #                        raw_input('ddd')
                    identific = iraf.longslit.reidentify(referenc=arcref, images=arcfile, interac=_interact,
                                                         section='column 10', shift=1.0,
                                                         coordli='direc$standard/ident/Lines_XeAr_SOFI.dat',
                                                         overrid='yes', step=0, newaps='no', nsum=5, nlost=2,
                                                         mode='h', verbose='yes', Stdout=1)
                    #                        fitsfile = ntt.efoscspec2Ddef.continumsub('new3.fits', 6, 1)
                    # I need to run twice I don't know why
                    #                        print identific
                    #                        raw_input('ddd')
                    if _interactive:
                        answ = raw_input(
                            '\n### do you like the identification [[y]/n]')
                        if not answ:
                            answ = 'y'
                    else:
                        answ = 'y'
                    if answ in ['n', 'N', 'no', 'NO', 'No']:
                        yy1 = pyfits.open(arcref)[0].data[:, 10:20].mean(1)
                        xx1 = arange(len(yy1))
                        yy2 = pyfits.open(arcfile)[0].data[:, 10:20].mean(1)
                        xx2 = arange(len(yy2))

                        ntt.util.delete('_new3.fits')
                        hdu = pyfits.PrimaryHDU(yy1)
                        hdulist = pyfits.HDUList([hdu])
                        hdulist.writeto('_new3.fits')

                        fitsfile = ntt.efoscspec2Ddef.continumsub('_new3.fits', 4, 1)
                        yy1 = pyfits.open(fitsfile)[0].data

                        ntt.util.delete('_new3.fits')
                        hdu = pyfits.PrimaryHDU(yy2)
                        hdulist = pyfits.HDUList([hdu])
                        hdulist.writeto('_new3.fits')

                        fitsfile = ntt.efoscspec2Ddef.continumsub('_new3.fits', 4, 1)
                        yy2 = pyfits.open(fitsfile)[0].data

                        _shift = ntt.efoscspec2Ddef.checkwavelength_arc(
                            xx1, yy1, xx2, yy2, '', '') * (-1)

                        print arcref, arcfile, _shift
                        identific = iraf.longslit.reidentify(referenc=arcref, images=arcfile, interac='YES',
                                                             section='column 10', shift=_shift,
                                                             coordli='direc$standard/ident/Lines_XeAr_SOFI.dat',
                                                             overrid='yes', step=0, newaps='no', nsum=5, nlost=2,
                                                             mode='h', verbose='yes', Stdout=1)

                        answ = raw_input('\n### is it ok now ? [[y]/n] ')
                        if not answ:
                            answ = 'y'
                        if answ in ['n', 'N', 'no', 'NO', 'No']:
                            sys.exit(
                                '\n### Warning: line identification with some problems')
                iraf.longslit.reidentify(referenc=arcfile, images=arcfile, interac='NO', section='column 10',
                                         coordli='direc$standard/ident/Lines_XeAr_SOFI.dat', overrid='yes', step=10,
                                         newaps='yes', nsum=5, nlost=2, mode='h', verbose='no')
                iraf.longslit.fitcoords(images=re.sub('.fits', '', arcfile), fitname=re.sub('.fits', '', arcfile),
                                        interac='no', combine='yes', databas='database',
                                        function='legendre', yorder=4, logfile='', plotfil='', mode='h')
                if identific:
                    _rms = float(identific[-1].split()[-1])
                    _num = float(identific[-1].split()[2].split('/')[0])
                    hdr = ntt.util.readhdr(arcfile)
                    hedvec = {'LAMRMS': [_rms * .1, 'residual RMS [nm]'],
                              'LAMNLIN': [_num, 'Nb of arc lines used in the fit of the wavel. solution'],
                              'SPEC_ERR': [(_rms * .1) / sqrt(float(_num)), 'statistical uncertainty'],
                              'SPEC_SYE': [0.1, 'systematic error']}
                    ntt.util.updateheader(arcfile, 0, hedvec)
            else:
                sys.exit('Warning: arcfile not found')
        else:
            print 'here'
        # ########################################################################################################
        for field in fieldlist[_grism]:
            listaobj = fieldlist[_grism][field]
            listaobj = ntt.sofiphotredudef.sortbyJD(listaobj)
            listatemp = listaobj[:]
            # ##############             flat            ######################
            if listflat and _doflat:
                flat0 = ntt.util.searchflat(listaobj[0], listflat)[0]
            else:
                flat0 = ''
            if flat0:
                _flatcor = 'yes'
            else:
                _flatcor = 'no'

            ##########   crosstalk        ###########################

            listatemp2 = []
            _date = readkey3(readhdr(listatemp[0]), 'date-night')
            for img in listatemp:
                #                    num2=listatemp.index(listasub[j])
                imgout = field + '_' + str(_date) + '_' + str(_grism) + '_' + str(MJDtoday) + '_' + str(
                    listatemp.index(img)) + '.fits'
                print '\n### input image: ' + str(img)
                delete(imgout)
                listatemp2.append(imgout)
                if _docross:
                    print '### correct for cross talk   .....   done'
                    ntt.sofiphotredudef.crosstalk(img, imgout)
                    correctcard(imgout)
                    ntt.util.updateheader(
                        imgout, 0, {'CROSSTAL': ['True', '']})
                else:
                    os.system('cp ' + img + ' ' + imgout)
                    correctcard(imgout)
                if _flatcor == 'yes':
                    print '### correct for flat field   .....   done'
                    try:
                        iraf.noao.imred.ccdred.ccdproc(imgout, output='', overscan='no', trim='no', zerocor='no',
                                                       flatcor=_flatcor, flat=flat0)
                    except:
                        iraf.imutil.imreplace(
                            images=flat0, value=0.01, lower='INDEF', upper=0.01, radius=0)
                        iraf.noao.imred.ccdred.ccdproc(imgout, output='', overscan='no', trim='no', zerocor='no',
                                                       flatcor=_flatcor, flat=flat0)
                iraf.noao.imred.ccdred.ccdproc(imgout, output='', overscan='no', trim='yes', zerocor='no',
                                               flatcor='no', flat='', trimsec='[30:1000,1:1024]')
                ntt.util.updateheader(
                    imgout, 0, {'FLATCOR': [flat0, 'flat correction']})

                if imgout not in outputlist:
                    outputlist.append(imgout)
                ntt.util.updateheader(imgout, 0, {'FILETYPE': [42104, 'pre-reduced frame'],
                                                  'SINGLEXP': [True, 'TRUE if resulting from single exposure'],
                                                  'M_EPOCH': [False, 'TRUE if resulting from multiple epochs'],
                                                  'PROV1': [readkey3(readhdr(imgout), 'ARCFILE'), 'Originating file'],
                                                  'TRACE1': [readkey3(readhdr(imgout), 'ARCFILE'), 'Originating file']})
                print '### output image: ' + str(imgout)

            listatemp = listatemp2[:]
            #########    differences object images  #####################
            listasub = ntt.sofispec2Ddef.findsubimage(listatemp)
            reduced = []
            print '\n### Select Frames to be subtracted (eg A-B, B-A, C-D, D-C, ....) '
            print '###    frame1 \t  frame2  \t   offset1  \t   offset2  \t  JD1  \t    JD2\n'
            if len(listatemp) >= 2 and len(listasub) >= 2:
                for j in range(0, len(listatemp)):
                    print '### ', listatemp[j], listasub[j], str(readkey3(readhdr(listatemp[j]), 'xcum')), str(
                        readkey3(readhdr(listasub[j]), 'xcum')), \
                        str(readkey3(readhdr(listatemp[j]), 'JD')), str(
                            readkey3(readhdr(listatemp[j]), 'JD'))
                    if _interactive:
                        answ = raw_input('\n### ok [[y]/n] ? ')
                        if not answ:
                            answ = 'y'
                    else:
                        answ = 'y'
                    num1 = j
                    image1 = listatemp[j]
                    _date = readkey3(readhdr(image1), 'date-night')
                    if answ == 'y':
                        num2 = listatemp.index(listasub[j])
                        image2 = listasub[j]
                    else:
                        image2 = raw_input(
                            'which image do you want to subtract')
                        num2 = listatemp.index(image2)
                    imgoutsub = field + '_' + str(_date) + '_' + str(_grism) + '_' + str(MJDtoday) + '_' + str(
                        num1) + '_' + str(num2) + '.fits'
                    delete(imgoutsub)
                    iraf.images.imutil.imarith(
                        operand1=image1, op='-', operand2=image2, result=imgoutsub, verbose='no')
                    ntt.util.updateheader(imgoutsub, 0, {'skysub': [image2, 'sky image subtracted'],
                                                         'FILETYPE': [42115, 'pre-reduced frame sky subtracted'],
                                                         'TRACE1': [image1, 'Originating file'],
                                                         'PROV2': [readkey3(readhdr(image2), 'ARCFILE'),
                                                                   'Originating file'],
                                                         'TRACE2': [image2, 'Originating file']})

                    reduced.append(imgoutsub)
                    if imgoutsub not in outputlist:
                        outputlist.append(imgoutsub)
            ########################     2D wavelength calibration      ########
            for img in reduced:
                if arcfile:
                    hdra = ntt.util.readhdr(arcfile)
                    delete('t' + img)
                    iraf.specred.transform(input=img, output='t' + img, minput='',
                                           fitnames=re.sub('.fits', '', arcfile), databas='database',
                                           x1='INDEF', x2='INDEF', y1='INDEF', y2='INDEF', flux='yes', mode='h',
                                           logfile='logfile')
                    ntt.util.updateheader('t' + img, 0,
                                          {'ARC': [arcfile, ''], 'FILETYPE': [42106, 'wavelength calibrated 2D frames'],
                                           'TRACE1': [img, 'Originating file']})
                    ntt.util.updateheader('t' + img, 0,
                                          {'LAMRMS': [ntt.util.readkey3(hdra, 'LAMRMS'), 'residual RMS [nm]'],
                                           'LAMNLIN': [ntt.util.readkey3(hdra, 'LAMNLIN'), 'number of arc lines'],
                                           'SPEC_ERR': [ntt.util.readkey3(hdra, 'SPEC_ERR'), 'statistical uncertainty'],
                                           'SPEC_SYE': [ntt.util.readkey3(hdra, 'SPEC_SYE'), 'systematic error']})
                    ###########################
                    delete('t' + arcfile)
                    iraf.specred.transform(input=arcfile, output='t' + arcfile, minput='',
                                           fitnames=re.sub('.fits', '', arcfile), databas='database',
                                           x1='INDEF', x2='INDEF', y1='INDEF', y2='INDEF', flux='yes', mode='h',
                                           logfile='logfile')
                    specred = ntt.util.spectraresolution2(arcfile, 50)
                    if specred:
                        ntt.util.updateheader(
                            't' + img, 0, {'SPEC_RES': [specred, 'Spectral resolving power']})
                    delete('t' + arcfile)
                    ###########################
                    iraf.hedit('t' + img, 'TRACE2', delete='yes',
                               update='yes', verify='no', Stdout=1)

                    if 't' + img not in outputlist:
                        outputlist.append('t' + img)
                    print '\n### 2D frame t' + str(img) + ' wavelength calibrated  ............ done'

                    _skyfile = ntt.__path__[0] + '/standard/ident/sky_' + _grism + '.fits'  # check wavelength calibration
                    hdr = ntt.util.readhdr(img)
                    if glob.glob(_skyfile) and readkey3(hdr, 'exptime') > 20.:
                        _original = readkey3(hdr, 'ORIGFILE')
                        _archive = readkey3(hdr, 'ARCFILE')
                        if os.path.isfile(_archive):
                            imgstart = _archive
                        elif os.path.isfile(_original):
                            imgstart = _original
                        else:
                            imgstart = ''
                        if imgstart:
                            delete('_tmp.fits')
                            print imgstart, arcfile
                            iraf.specred.transform(input=imgstart, output='_tmp.fits', minput='',
                                                   fitnames=re.sub('.fits', '', arcfile), databas='database',
                                                   x1='INDEF', x2='INDEF', y1='INDEF', y2='INDEF', flux='yes', mode='h',
                                                   logfile='logfile')

                            shift = ntt.sofispec2Ddef.skysofifrom2d('_tmp.fits', _skyfile)
                            zro = pyfits.open('_tmp.fits')[0].header.get('CRVAL2')

                            delete('_tmp.fits')
                            if _interactive:
                                answ = raw_input(
                                    'do you want to correct the wavelength calibration with this shift: ' + str(
                                        shift) + ' [[y]/n] ? ')
                                if not answ:
                                    answ = 'y'
                            else:
                                answ = 'y'
                            if answ.lower() in ['y', 'yes']:
                                ntt.util.updateheader('t' + img, 0,
                                                      {'CRVAL2': [zro + int(shift), ''], 'shift': [float(shift), '']})
                            #                                    ntt.util.updateheader('t'+img,0,{'shift':[float(shift),'']})
                            print '\n### check wavelength calibration with sky lines ..... done'
                    try:
                        hdrt = ntt.util.readhdr('t' + img)
                        wavelmin = float(readkey3(hdrt, 'CRVAL2')) + (0.5 - float(readkey3(hdrt, 'CRPIX2'))) * float(
                            readkey3(hdrt, 'CDELT2'))
                        wavelmax = float(readkey3(hdrt, 'CRVAL2')) + (
                            (float(readkey3(hdrt, 'NAXIS2')) + 0.5 - float(readkey3(hdrt, 'CRPIX2'))) * float(
                                readkey3(hdrt, 'CDELT2')))
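                        # Wavelength at pixel p along the dispersion axis follows the linear
                        # WCS: lambda(p) = CRVAL2 + (p - CRPIX2) * CDELT2 (in Angstrom here);
                        # evaluating it at the pixel edges p = 0.5 and p = NAXIS2 + 0.5 gives
                        # the full spectral coverage written below (converted to nm with * .1).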
                        hedvec = {}
                        hedvec['WAVELMIN'] = [
                            wavelmin * .1, '[nm] minimum wavelength']
                        hedvec['WAVELMAX'] = [
                            wavelmax * .1, '[nm] maximum wavelength']
                        hedvec['XMIN'] = [wavelmin, '[A] minimum wavelength']
                        hedvec['XMAX'] = [wavelmax, '[A] maximum wavelength']
                        hedvec['SPEC_BW'] = [
                            (wavelmax * .1) - (wavelmin * .1), '[nm] Bandpass Width Wmax - Wmin']
                        hedvec['SPEC_VAL'] = [
                            ((wavelmax * .1) + (wavelmin * .1)) / 2., '[nm] Mean Wavelength']
                        hedvec['SPEC_BIN'] = [
                            ((wavelmax * .1) - (wavelmin * .1)) /
                            (float(readkey3(hdr, 'NAXIS2')) - 1),
                            'Wavelength bin size [nm/pix]']
                        hedvec['VOCLASS'] = ['SPECTRUM V1.0', 'VO Data Model']
                        hedvec['VOPUB'] = ['ESO/SAF',
                                           'VO Publishing Authority']
                        #                            hedvec['APERTURE']=[float(re.sub('slit','',readkey3(hdrt,'slit'))),'aperture width']
                        ntt.util.updateheader('t' + img, 0, hedvec)
                    except:
                        pass
                else:
                    print '\n### Warning: arc not found for the image ' + str(img) + ' with setup ' + str(_grism)

    reduceddata = rangedata(outputlist)
    print '\n### adding keywords for phase 3 ....... '
    f = open('logfile_spec2d_' + str(reduceddata) +
             '_' + str(datenow) + '.raw.list', 'w')
    for img in outputlist:
        if img[-4:] == 'fits':
            hdr = readhdr(img)
            # ###############################################
            # cancel pc matrix
            for pc in ['PC1_1', 'PC1_2', 'PC2_1', 'PC2_2']:
                if pc in hdr.keys():
                    aaa = iraf.hedit(img, pc, delete='yes',
                                     update='yes', verify='no', Stdout=1)
            #################
            # added for DR2
            print img

            if 'NCOMBINE' in hdr:
                _ncomb = readkey3(hdr, 'NCOMBINE')
            else:
                _ncomb = 1.0

            ntt.util.updateheader(
                img, 0, {'DETRON ': [12, 'Readout noise per output (e-)']})
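            # EFFRON below: the 12 e- read noise scaled by 1/sqrt(NDIT * NCOMBINE) for the
            # number of averaged reads, times sqrt(pi/2) (the standard noise penalty of a
            # median-type combination, presumably the reason for the factor here).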
            ntt.util.updateheader(img, 0, {'EFFRON': [12. * (1 / sqrt(readkey3(hdr, 'ndit') * _ncomb)) * sqrt(pi / 2),
                                                      'Effective readout noise per output (e-)']})
            ntt.util.phase3header(img)  # phase 3 definitions
            ############################
            #  change for DR2
            ############################
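            # total exposure time: DIT x NDIT; the end of the observation adds ~1.8 s of
            # overhead per sub-integration; TELAPSE is the elapsed time in seconds and
            # TMID the mid-point of the exposure in MJD.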
            texp = float(readkey3(hdr, 'dit')) * float(readkey3(hdr, 'ndit'))
            mjdend = float(readkey3(hdr, 'MJD-OBS')) + (float(readkey3(hdr, 'ndit')) * (
                float(readkey3(hdr, 'dit')) + 1.8)) / (60. * 60. * 24.)
            strtexp = time.strftime('%H:%M:%S', time.gmtime(texp))
            _telapse = (mjdend - float(readkey3(hdr, 'MJD-OBS'))) * \
                60. * 60 * 24.
            # tmid=_telapse/2.
            tmid = (mjdend + float(readkey3(hdr, 'MJD-OBS'))) / 2
            ntt.util.updateheader(img, 0, {'quality': ['Final', 'fast or rapid reduction'],
                                           'BUNIT': ['ADU', 'Physical unit of array values'],
                                           'DIT': [readkey3(hdr, 'dit'), 'Detector Integration Time'],
                                           'NDIT': [readkey3(hdr, 'ndit'), 'Number of sub-integrations'],
                                           'TEXPTIME': [texp, 'Total integration time of all exposures (s)'],
                                           'EXPTIME': [texp, 'Total integration time. ' + strtexp],
                                           'MJD-END': [mjdend, 'End of observations (days)'],
                                           'TELAPSE': [_telapse, 'Total elapsed time [s]'],
                                           'TMID': [tmid, '[d] MJD mid exposure'],
                                           'TITLE': [readkey3(hdr, 'object'), 'Dataset title'],
                                           #'TITLE':[str(tmid)[0:9]+' '+str(readkey3(hdr,'object'))+' '+str(readkey3(hdr,'grism'))+' '+\
                                           # str(readkey3(hdr,'filter'))+'
                                           # '+str(readkey3(hdr,'slit')),'Dataset
                                           # title'],\
                                           'EXT_OBJ': [False, 'TRUE if extended'],
                                           'CONTNORM': [False, 'spectrum normalized to the continuum'],
                                           'TOT_FLUX': [False, 'TRUE if phot cond and all src flux is captured'],
                                           'SPECSYS': ['TOPOCENT', 'Reference frame for spectral coordinate'],
                                           'FLUXCAL': ['ABSOLUTE', 'type of flux calibration'],
                                           'FLUXERR': [34.7, 'Fractional uncertainty of the flux [%]'],
                                           'DISPELEM': ['Gr#' + re.sub('Gr', '', readkey3(hdr, 'grism')),
                                                        'Dispersive element name']})
            if readkey3(hdr, 'tech'):
                ntt.util.updateheader(
                    img, 0, {'PRODCATG': ['SCIENCE.IMAGE', 'Data product category']})
            aaa = str(readkey3(hdr, 'arcfiles')) + '\n'
            f.write(aaa)
            try:
                ntt.util.airmass(img)  # phase 3 definitions
            except:
                print '\n### airmass not computed for image: ', img
        else:
            print img + ' is not a fits image'
    f.close()
    return outputlist, 'logfile_spec2d_' + str(reduceddata) + '_' + str(datenow) + '.raw.list'
示例#48
0
def efoscfastredu(imglist, _listsens, _listarc, _ext_trace, _dispersionline,
                  _cosmic, _interactive):
    # print "LOGX:: Entering `efoscfastredu` method/function in %(__file__)s"
    # % globals()
    import string
    import os
    import re
    import sys
    os.environ["PYRAF_BETA_STATUS"] = "1"
    try:
        from astropy.io import fits as pyfits
    except:
        import pyfits
    from ntt.util import readhdr, readkey3
    import ntt
    import numpy as np
    dv = ntt.dvex()
    scal = np.pi / 180.
    if not _interactive:
        _interactive = False
        _inter = 'NO'
    else:
        _inter = 'YES'
    from pyraf import iraf

    iraf.noao(_doprint=0, Stdout=0)
    iraf.imred(_doprint=0, Stdout=0)
    iraf.ccdred(_doprint=0, Stdout=0)
    iraf.twodspec(_doprint=0, Stdout=0)
    iraf.longslit(_doprint=0, Stdout=0)
    iraf.onedspec(_doprint=0, Stdout=0)
    iraf.specred(_doprint=0, Stdout=0)
    toforget = [
        'ccdproc', 'imcopy', 'specred.apall', 'longslit.identify',
        'longslit.reidentify', 'specred.standard', 'longslit.fitcoords',
        'onedspec.wspectext'
    ]
    for t in toforget:
        iraf.unlearn(t)
    iraf.ccdred.verbose = 'no'  # do not print steps
    iraf.specred.verbose = 'no'  # do not print steps
    iraf.ccdproc.darkcor = 'no'
    iraf.ccdproc.fixpix = 'no'
    iraf.ccdproc.flatcor = 'no'
    iraf.ccdproc.zerocor = 'no'
    iraf.ccdproc.ccdtype = ''
    _gain = ntt.util.readkey3(ntt.util.readhdr(imglist[0]), 'gain')
    _ron = ntt.util.readkey3(ntt.util.readhdr(imglist[0]), 'ron')
    iraf.specred.apall.readnoi = _ron
    iraf.specred.apall.gain = _gain
    iraf.specred.dispaxi = 2
    iraf.longslit.dispaxi = 2
    iraf.longslit.mode = 'h'
    iraf.specred.mode = 'h'
    iraf.noao.mode = 'h'
    iraf.ccdred.instrument = "ccddb$kpno/camera.dat"
    iraf.set(direc=ntt.__path__[0] + '/')
    for img in imglist:
        hdr = ntt.util.readhdr(img)
        _tech = ntt.util.readkey3(hdr, 'tech')
        if _tech != 'SPECTRUM':
            sys.exit('error: ' + str(img) + ' is not a spectrum ')
        print '\n####  image name = ' + img + '\n'
        _grism0 = readkey3(hdr, 'grism')
        _filter0 = readkey3(hdr, 'filter')
        _slit0 = readkey3(hdr, 'slit')
        _object0 = readkey3(hdr, 'object')
        _date0 = readkey3(hdr, 'date-night')
        setup = (_grism0, _filter0, _slit0)
        _biassec0 = '[3:1010,1026:1029]'
        if _grism0 == 'Gr16':
            _trimsec0 = '[100:950,1:950]'
        elif _grism0 == 'Gr13':
            if _filter0 == 'Free':
                _trimsec0 = '[100:950,1:1015]'
            elif _filter0 == 'GG495':
                _trimsec0 = '[100:950,208:1015]'
            elif _filter0 == 'OG530':
                _trimsec0 = '[100:950,300:1015]'
        elif _grism0 == 'Gr11':
            _trimsec0 = '[100:950,5:1015]'
        else:
            _trimsec0 = '[100:950,5:1015]'
        _object0 = re.sub(' ', '', _object0)
        _object0 = re.sub('/', '_', _object0)
        nameout0 = 't' + str(_object0) + '_' + str(_date0)
        for _set in setup:
            nameout0 = nameout0 + '_' + _set
        nameout0 = ntt.util.name_duplicate(img, nameout0, '')
        timg = nameout0
        if os.path.isfile(timg):
            os.system('rm -rf ' + timg)
        iraf.imcopy(img, output=timg)
        iraf.ccdproc(timg,
                     output='',
                     overscan='no',
                     trim='yes',
                     zerocor="no",
                     flatcor="no",
                     readaxi='column',
                     trimsec=str(_trimsec0),
                     biassec=_biassec0,
                     Stdout=1)
        img = timg
        if _listarc:
            arcfile = ntt.util.searcharc(img, _listarc)[0]
        else:
            arcfile = ''
        if not arcfile:
            arcfile = ntt.util.searcharc(img, '')[0]
        else:
            iraf.ccdproc(arcfile,
                         output='t' + arcfile,
                         overscan='no',
                         trim='yes',
                         zerocor="no",
                         flatcor="no",
                         readaxi='column',
                         trimsec=str(_trimsec0),
                         biassec=str(_biassec0),
                         Stdout=1)
            arcfile = 't' + arcfile

        if _cosmic:
            # print cosmic rays rejection
            ntt.cosmics.lacos(img,
                              output='',
                              gain=_gain,
                              readn=_ron,
                              xorder=9,
                              yorder=9,
                              sigclip=4.5,
                              sigfrac=0.5,
                              objlim=1,
                              verbose=True,
                              interactive=False)
            print '\n### cosmic ray rejection ........ done '

        if not arcfile:
            print '\n### warning: no arcfile found, skipping ' + str(img)
        else:
            arcref = ntt.util.searcharc(img, '')[0]
            if arcfile[0] == '/':
                os.system('cp ' + arcfile + ' ' +
                          string.split(arcfile, '/')[-1])
                arcfile = string.split(arcfile, '/')[-1]
            arcref = string.split(arcref, '/')[-1]
            if arcref:
                os.system('cp ' + arcref + ' .')
                arcref = string.split(arcref, '/')[-1]
                if not os.path.isdir('database/'):
                    os.mkdir('database/')
                if os.path.isfile(
                        ntt.util.searcharc(img, '')[1] + '/database/id' +
                        re.sub('.fits', '', arcref)):
                    os.system('cp ' + ntt.util.searcharc(img, '')[1] +
                              '/database/id' + re.sub('.fits', '', arcref) +
                              ' database/')
                iraf.longslit.reidentify(
                    referenc=arcref,
                    images=arcfile,
                    interac=_inter,
                    section='column 10',
                    coordli='direc$standard/ident/Lines_HgCdHeNeAr600.dat',
                    overrid='yes',
                    step=0,
                    newaps='no',
                    nsum=5,
                    nlost=2,
                    mode='h',
                    verbose='no')
            else:
                iraf.longslit.identify(
                    images=arcfile,
                    section='column 10',
                    coordli='direc$standard/ident/Lines_HgCdHeNeAr600.dat',
                    nsum=10,
                    fwidth=7,
                    order=3,
                    mode='h')
            iraf.longslit.reidentify(
                referenc=arcfile,
                images=arcfile,
                interac='NO',
                section='column 10',
                coordli='direc$standard/ident/Lines_HgCdHeNeAr600.dat',
                overrid='yes',
                step=10,
                newaps='yes',
                nsum=5,
                nlost=2,
                mode='h',
                verbose='no')
            qqq = iraf.longslit.fitcoords(images=re.sub('.fits', '', arcfile),
                                          fitname=re.sub('.fits', '', arcfile),
                                          interac='no',
                                          combine='yes',
                                          databas='database',
                                          function='legendre',
                                          yorder=4,
                                          logfile='logfile',
                                          plotfil='',
                                          mode='h')
            iraf.specred.transform(input=img,
                                   output=img,
                                   minput='',
                                   fitnames=re.sub('.fits', '', arcfile),
                                   databas='database',
                                   x1='INDEF',
                                   x2='INDEF',
                                   y1='INDEF',
                                   y2='INDEF',
                                   flux='yes',
                                   mode='h',
                                   logfile='logfile')
            # ######################  check wavelength calibration ############
            _skyfile = ntt.__path__[0] + '/standard/ident/sky_' + setup[
                0] + '_' + setup[1] + '.fits'
            shift = ntt.efoscspec2Ddef.skyfrom2d(img, _skyfile)
            print '\n###     wavelength check performed ...... spectrum shifted by ' + str(
                shift) + ' Angstrom \n'
            zro = pyfits.open(img)[0].header.get('CRVAL2')
            ntt.util.updateheader(img, 0, {'CRVAL2': [zro + int(shift), '']})
            std, rastd, decstd, magstd = ntt.util.readstandard(
                'standard_efosc_mab.txt')
            hdrt = readhdr(img)
            _ra = readkey3(hdrt, 'RA')
            _dec = readkey3(hdrt, 'DEC')
            _object = readkey3(hdrt, 'object')
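            # Angular separation to every catalogued standard via the spherical law of
            # cosines, converted from radians to arcsec; the closest match within 100"
            # (checked below) flags this frame as a spectrophotometric standard.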
            dd = np.arccos(
                np.sin(_dec * scal) * np.sin(decstd * scal) +
                np.cos(_dec * scal) * np.cos(decstd * scal) * np.cos(
                    (_ra - rastd) * scal)) * ((180 / np.pi) * 3600)
            if min(dd) < 100:
                _type = 'stdsens'
                ntt.util.updateheader(img, 0,
                                      {'stdname': [std[np.argmin(dd)], '']})
                ntt.util.updateheader(
                    img, 0, {'magstd': [float(magstd[np.argmin(dd)]), '']})
            else:
                _type = 'obj'
            print '\n###      EXTRACTION USING IRAF TASK APALL \n'
            result = []
            if _type == 'obj':
                imgex = ntt.util.extractspectrum(img, dv, _ext_trace,
                                                 _dispersionline, _interactive,
                                                 _type)
                ntt.util.updateheader(
                    imgex, 0, {'FILETYPE': [22107, 'extracted 1D spectrum ']})
                ntt.util.updateheader(
                    imgex, 0, {
                        'PRODCATG': [
                            'SCIENCE.' +
                            readkey3(readhdr(imgex), 'tech').upper(),
                            'Data product category'
                        ]
                    })
                ntt.util.updateheader(imgex, 0, {'TRACE1': [img, '']})
                result.append(imgex)
                if _listsens:
                    sensfile = ntt.util.searchsens(img, _listsens)[0]
                else:
                    sensfile = ''
                if not sensfile:
                    sensfile = ntt.util.searchsens(img, '')[0]
                if sensfile:
                    imgf = re.sub('.fits', '_f.fits', img)
                    _extinctdir = 'direc$standard/extinction/'
                    _extinction = 'extinction_lasilla.dat'
                    _observatory = 'lasilla'
                    _exptime = readkey3(hdrt, 'exptime')
                    _airmass = readkey3(hdrt, 'airmass')
                    ntt.util.delete(imgf)
                    iraf.specred.calibrate(input=imgex,
                                           output=imgf,
                                           sensiti=sensfile,
                                           extinct='yes',
                                           flux='yes',
                                           ignorea='yes',
                                           extinction=_extinctdir +
                                           _extinction,
                                           observatory=_observatory,
                                           airmass=_airmass,
                                           exptime=_exptime,
                                           fnu='no')
                    hedvec = {
                        'SENSFUN': [
                            string.split(sensfile, '/')[-1],
                            'sensitivity function'
                        ],
                        'FILETYPE':
                        [22208, '1D wavelength and flux calibrated spectrum '],
                        'SNR':
                        [ntt.util.StoN2(imgf, False), 'Average S/N ratio'],
                        'BUNIT':
                        ['erg/cm2/s/Angstrom', 'Flux Calibration Units'],
                        'TRACE1': [imgex, '']
                    }
                    ntt.util.updateheader(imgf, 0, hedvec)
                    imgout = imgf
                    imgd = ntt.efoscspec1Ddef.fluxcalib2d(img, sensfile)
                    ntt.util.updateheader(
                        imgd, 0, {
                            'FILETYPE': [
                                22209,
                                '2D wavelength and flux calibrated spectrum '
                            ]
                        })
                    ntt.util.updateheader(imgd, 0, {'TRACE1': [img, '']})
                    imgasci = re.sub('.fits', '.asci', imgout)
                    ntt.util.delete(imgasci)
                    iraf.onedspec.wspectext(imgout + '[*,1,1]',
                                            imgasci,
                                            header='no')
                    result = result + [imgout, imgd, imgasci]
            else:
                imgex = ntt.util.extractspectrum(img, dv, _ext_trace,
                                                 _dispersionline, _interactive,
                                                 'std')
                imgout = ntt.efoscspec1Ddef.sensfunction(
                    imgex, 'spline3', 6, _inter)
                result = result + [imgout]

    for img in result:
        if img[-5:] == '.fits':
            ntt.util.phase3header(img)  # phase 3 definitions
            ntt.util.airmass(img)  # phase 3 definitions
            ntt.util.updateheader(
                img, 0, {'quality': ['Rapid', 'Final or Rapid reduction']})
    return result
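For reference, a minimal standalone sketch (not part of the routine above) of the same nearest-standard lookup done with astropy.coordinates instead of the explicit spherical-trigonometry expression; the catalogue arrays and the 100-arcsec threshold are assumptions that simply mirror the snippet.

import numpy as np
import astropy.units as u
from astropy.coordinates import SkyCoord

def nearest_standard(ra_deg, dec_deg, rastd, decstd, names, max_sep_arcsec=100.0):
    """Return (name, separation_arcsec) of the closest standard, or None if too far."""
    target = SkyCoord(ra=ra_deg * u.deg, dec=dec_deg * u.deg)
    catalogue = SkyCoord(ra=np.asarray(rastd, float) * u.deg,
                         dec=np.asarray(decstd, float) * u.deg)
    sep = target.separation(catalogue).arcsec
    i = int(np.argmin(sep))
    return (names[i], sep[i]) if sep[i] < max_sep_arcsec else None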
示例#49
0
cur.execute("SELECT spec_path,name from fields where ymd=140825 and ccd='ccd_1' and obstype='BIAS'")

# <codecell>

objs=cur.fetchall()

# <codecell>

from pyraf import iraf

# <codecell>

iraf.noao(_doprint=0,Stdout="/dev/null")
iraf.imred(_doprint=0,Stdout="/dev/null")
iraf.ccdred(_doprint=0,Stdout="/dev/null")

# <codecell>
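# Bias-subtract a raw flat with the master bias produced earlier; all other ccdproc corrections are switched off.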

iraf.ccdproc(images='tmp/flats/25aug10034.fits', ccdtype='', fixpix='no', oversca='no', trim='no', zerocor='yes', darkcor='no', flatcor='no', zero='tmp/masterbias',Stdout="/dev/null")

# <codecell>

pwd

# <codecell>

cd ~/Documents/workspace/GAP/IrafReduction/140825/ccd11/

# <codecell>
示例#50
0
def main():
    iraf.noao()
    iraf.imred()
    iraf.ccdred()
    iraf.crutil()
    iraf.twodspec()
    iraf.apextract()
    name = os.popen('ls object*.lst').readlines()
    name = [i.split('\n')[0] for i in name]
    if len(name) == 0:
        print 'no object list found, the script ends'
    for i in name:
        print 'open file ' + i + '...'
        f = open(i)
        l = f.readlines()
        f.close()
        l = [i2.split('\n')[0] for i2 in l]
        for i2 in l:
            print i2
        objs = []
        calibs = []
        standards = []
        fno = l.index('object') + 1
        while fno < len(l):
            if '.fits' not in l[fno]:
                break
            objs.append('ftbo' + l[fno])
            fno += 1
        fno = l.index('calibration') + 1
        while fno < len(l):
            if '.fits' not in l[fno]:
                break
            calibs.append('ftbo' + l[fno])
            fno += 1
        fno = l.index('standard') + 1
        while fno < len(l):
            if '.fits' not in l[fno]:
                break
            standards.append('ftbo' + l[fno])
            fno += 1
        print '=' * 20, 'reject cosmic ray', '=' * 20
        #cobj = shift_align(objs)
        #cstandard = shift_align(standards)
        cobjs = []
        cstandards = []
        for eobj in objs:
            cobjs.append(cos_clear([eobj]))
        for eobj in standards:
            cstandards.append(cos_clear([eobj]))
        print 'The fits having rejected cosmic ray is: '
        print cobjs
        print cstandards
        print '=' * 20, 'get spectrum line', '=' * 20
        acobjs = []
        acstandards = []
        for cobj in cobjs:
            acobjs.append(get_spec_line(cobj))
        for cobj in cstandards:
            acstandards.append(get_spec_line(cobj))
        acalibs = []
        for i3 in calibs:
            acalibs.append(get_cal_spec_line(i3, cobj))
        print '=' * 20, 'wavelength identify', '=' * 20
        iacalibs = wl_identy(acalibs)
        print '=' * 20, 'wavelength calibrate', '=' * 20
        inamepart = i.replace('.','_').split('_')
        objnames = []
        stdobjnames = []
        for numb in xrange(len(acobjs)):
            objnames.append( 'iac' + inamepart[1] + '_' + inamepart[2] + '_' + inamepart[3] +
                    '_' + str(numb) + '.fits')
        for numb in xrange(len(acstandards)):
            stdobjnames.append('iacstd' + inamepart[1] + '_' + inamepart[2] + '_' + inamepart[3] +
                    '_' + str(numb) + '.fits')
        for numb in xrange(len(objnames)):
            wl_cal(acobjs[numb], objnames[numb], iacalibs)
        for numb in xrange(len(stdobjnames)):
            wl_cal(acstandards[numb], stdobjnames[numb], iacalibs)
        print '=' * 20, 'flux calibrate', '=' * 20
        for objname in objnames:
            final_objname = flux_cal_new(objname, stdobjnames)
        print 'the final object name is', final_objname
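For orientation, an assumed layout of the object*.lst files this routine parses; the file and frame names below are hypothetical, and only the three section labels followed by .fits names are actually required by the parser above (which prepends 'ftbo' to each frame name):

# object_20xx_gr4_slit1.lst   (hypothetical example)
#
#   object
#   target001.fits
#   target002.fits
#   calibration
#   arc001.fits
#   standard
#   std001.fits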
示例#51
0
def test(instr=None,rawdata_dir='UT160327',reduction_dir=None,dograting=None,dofilter=None,silent=False):
    print('='*70)
    print("---------- Drew's APO 3.5m reduction script ----------")
    print('='*70)

    print('loading some basic iraf packages...')
    iraf.noao(); iraf.imred(); iraf.ccdred()
    print('='*70)

##########################################################################################################
    print('1. INSTRUMENT ESTABLISHMENT\n')
    if instr is None:
        print('Which APO 3.5m instrument did you use (default is ARCTIC)? Choices:')
        print(' - enter 1 for ARCTIC (default)')
        print(' - enter 2 for DIS')
        print(' - enter 3 for something else')
        a=raw_input('')
        if a=='':  a='1'
        if a[:1]=='1': instr='arctic'
        if a[:1]=='2': instr='dis'
        if (a[:1]!='1') & (a[:1]!='2'): sys.exit("You're out of luck for now. Sorry!")
        print('\nOk. '+instr.upper()+' data will be reduced')
        print('='*70)

##########################################################################################################
    print('2. RAW DATA DIRECTORY ESTABLISHMENT\n')
    if rawdata_dir is None:
        a=raw_input('Enter the name of the directory where your images are stored: ')
        if a=='': sys.exit('You must specify a raw data directory. Exiting the APO 3.5m reduction script.')
        if a[-1]!='/': a=a+'/'
        rawdata_dir=a
        check=glob.glob(rawdata_dir)
        if len(check)==0: sys.exit('The directory "'+rawdata_dir+'" does not exist. Exiting the APO 3.5m reduction script.')
    else:
        if rawdata_dir[-1:]!='/': rawdata_dir=rawdata_dir+'/'
        check=glob.glob(rawdata_dir)
        if len(check)==0: sys.exit('The directory "'+rawdata_dir+'" does not exist. Exiting the APO 3.5m reduction script.')

    print('Ok. The raw data directory will be "'+rawdata_dir+'"')
    print('='*70)

##########################################################################################################
    print('3. RAW DATA CHECK\n')
    rawfiles = np.array(glob.glob(rawdata_dir+'*fits')); nraw=len(rawfiles)
    if nraw==0: sys.exit('No FITS files found in the raw data directory. Exiting the APO 3.5m reduction script.')
    instrlist=[]; imtype=[]
    for rawfile in rawfiles:
        head=fits.getheader(rawfile)
        instrlist.append(head['instrume'].lower())
    instrlist=np.array(instrlist)
    gd=np.where(instrlist==instr); ngd=len(gd[0])
    if ngd==0: 
        sys.exit('No FITS files from '+instr.upper()+' were found in the raw data directory. Exiting the APO 3.5m reduction script.')
    else:
        rawfiles=rawfiles[gd]
        print('Ok. Found '+str(ngd)+' FITS images with instrument='+instr.upper()+' in the raw data directory.')
        print('='*70)

##########################################################################################################
    print('4. REDUCTION DIRECTORY ESTABLISHMENT\n')
    if reduction_dir is not None:
        check=glob.glob(reduction_dir)
        if len(check)==0: 
            a=raw_input('Specified reduction directory does not exist. Create it (Y/N)? ')
            if (a=='') | (a[:1].lower()=='y'): 
                cmnd='mkdir '+reduction_dir
                os.system(cmnd)
                print('"'+reduction_dir+'" has been created.')
    else:
        reduction_dir=rawdata_dir[:-1]+'_reduction/'
        print('The reduction directory will be '+reduction_dir)
        print(' - hit ENTER to accept')
        print(' - otherwise enter a different reduction directory name: ')
        a=raw_input('')
        if a=='':
            check=glob.glob(reduction_dir)
            if len(check)==0:
                cmnd='mkdir '+reduction_dir
                os.system(cmnd)
                print('\nOk. "'+reduction_dir+'" has been created.')
        else:
            reduction_dir=a
            check=glob.glob(reduction_dir)
            if len(check)==0:
                cmnd='mkdir '+reduction_dir 
                os.system(cmnd)
                print('\nOk. "'+reduction_dir+'" has been created.')

    print('\nOk. Reduction products will be stored in '+reduction_dir+'.')
    print('='*70)

##########################################################################################################
    if instr=='dis':
        print('loading some more IRAF packages for DIS reduction...')
        iraf.twod(); iraf.apex(); iraf.longs(); iraf.oned()
        print('='*70)
        print('5. DIS PARAMETER ESTABLISHMENT\n')
        grating=[]; imagetypes=[]
        for rawfile in rawfiles:
            head=fits.getheader(rawfile)
            grating.append(head['grating'])
            imagetypes.append(head['imagetyp'].lower())
        imagetypes=np.array(imagetypes)
        grating=np.array(grating)
        uniquegrating=np.unique(grating)

        if dograting is None:
            print("Which DIS grating do you want to reduce? Here's what you have:")
            for i in range(len(uniquegrating)): 
                if i==0: 
                    print(' - enter '+str(i+1)+' for '+uniquegrating[i]+' (default)')
                else:
                    print(' - enter '+str(i+1)+' for '+uniquegrating[i])
            a=raw_input('')
            if a=='': a='1'
            dograting=uniquegrating[int(a[:1])-1]
            gd=np.where(grating==dograting); ngd=len(gd[0])
            rawfiles=rawfiles[gd]; imagetypes=imagetypes[gd]
            print('\nOk. '+dograting+' data will be reduced: '+str(ngd)+' total images')
        else:
            gd=np.where(grating==dograting); ngd=len(gd[0])
            if ngd==0:
                sys.exit('No '+dograting+' images found. Exiting the APO 3.5m reduction script.')
            else:
                rawfiles=rawfiles[gd]; imagetypes=imagetypes[gd]
                print('\nOk. '+dograting+' images will be reduced: '+str(ngd)+' total images')

        bias=np.where((imagetypes=='bias') | (imagetypes=='zero')); nbias=len(bias[0])
        zcor='yes'
        if nbias==0:
            print('\nNo bias images were found. Proceed without bias correction? (y/n)')
            a=raw_input('')
            if (a[:1]=='y') | (a==''): 
                zcor='no'; biasfiles=''; nbias=0
            else:
                sys.exit('Exiting the APO 3.5m reduction script.')
        else:
            biasfiles=rawfiles[bias]; nbias=len(bias[0])

        # get values depending on R vs B detector
        detector=dograting[:1].lower()
        if detector=='b':
            gain=1.68
            rdnoise=4.9
            badpixfile=''
            fixpix='no'
        if detector=='r':
            gain=1.88
            rdnoise=4.6
            badpixfile='badpix_disR.txt'
            fixpix='yes'

        head=fits.getheader(rawfiles[0])
        biassec=head['biassec']
        datasec=head['datasec']

        genericlabel=dograting

        print('\nOk. DIS parameters established.')
        print('='*70)

##########################################################################################################
    if instr=='arctic':
        print('5. ARCTIC PARAMETER ESTABLISHMENT\n')
        gain=2.0
        rdnoise=3.7
        badpixfile=''
        fixpix='no'

        filters=[]; imagetypes=[]
        for rawfile in rawfiles:
            head=fits.getheader(rawfile)
            filters.append(head['filter'])
            imagetypes.append(head['imagetyp'].lower())
        imagetypes=np.array(imagetypes)
        filters=np.array(filters)
        uniquefilters=np.unique(filters)

        bias=np.where(imagetypes=='bias'); nbias=len(bias[0])
        zcor='yes'
        if nbias==0:
            print('\nNo bias images were found. Proceed without bias correction? (y/n)')
            a=raw_input('')
            if (a[:1]=='y') | (a==''): 
                zcor='no'; biasfiles=''
            else:
                sys.exit('Exiting the APO 3.5m reduction script.')
        else:
            biasfiles=rawfiles[bias]; nbias=len(bias[0])

        # establish ARCTIC filter
        if dofilter is None:
            print('Which ARCTIC filter do you want to reduce? ')
            for i in range(len(uniquefilters)): 
                if i==0: 
                    print(' - enter '+str(i+1)+' for '+uniquefilters[i]+' (default)')
                else:
                    print(' - enter '+str(i+1)+' for '+uniquefilters[i])
            a=raw_input('')
            if a=='': a='1'
            dofilter=uniquefilters[int(a[:1])-1]
            gd=np.where(filters==dofilter); ngd=len(gd[0])
            rawfiles=rawfiles[gd]
            print('\nOk. '+dofilter+' filter images will be reduced: '+str(ngd)+' total images')
        else:
            gd=np.where(filters==dofilter); ngd=len(gd[0])
            if ngd==0:
                sys.exit('No '+dofilter+' images found. Exiting the APO 3.5m reduction script.')
            else:
                rawfiles=rawfiles[gd]
                print('\nOk. '+dofilter+' filter images will be reduced: '+str(ngd)+' total images')


        head=fits.getheader(rawfiles[0])
        biassec=head['bsec11']
        datasec=head['dsec11']

        genericlabel=dofilter.replace(' ','_')
        genericlabel=genericlabel.replace('/','_')

        print('\nOk. ARCTIC parameters established.')
        print('='*70)

##########################################################################################################
    print('6. GET IMAGE TYPES\n')
    nrawfiles = len(rawfiles)

    # gather some more info from headers
    imtype = []; exptime = []; ra = []; dec = []
    for rawfile in rawfiles:
        header = fits.getheader(rawfile)
        imtype.append(header['IMAGETYP'].lower())
        exptime.append(header['EXPTIME'])
        ra.append(header['RA'])
        dec.append(header['DEC'])
    imtype=np.array(imtype)
    exptime=np.array(exptime); ra=np.array(ra)
    dec=np.array(dec)

    # separate the files into biases, flat, objs, and comps
    flat=np.where(imtype=='flat');  nflat=len(flat[0]); flatfiles=rawfiles[flat]
    obj=np.where(imtype=='object'); nobj=len(obj[0]);   objfiles=rawfiles[obj]
    if instr=='dis':
        comp=np.where(imtype=='comp'); ncomp=len(comp[0]); compfiles=rawfiles[comp]
        print('Ok. You have '+str(nbias)+' biases, '+str(nflat)+' flats, '+str(ncomp)+' comps, and '+str(nobj)+' objects.')
    if instr=='arctic':
        print('Ok. You have '+str(nbias)+' biases, '+str(nflat)+' flats, and '+str(nobj)+' objects.')
    print('='*70)


##########################################################################################################
    print('7. MAKE MASTER BIAS\n')
    if zcor=='yes':
        masterzero=reduction_dir+'Zero_'+genericlabel+'.fits'
        a=raw_input('Average combine biases into master bias (y/n)? ')
        if (a[:1]=='y') | (a==''):
            iraf.imcombine(','.join(biasfiles),output=masterzero,combine='average',reject='avsigclip',lsigma='3',
                           hsigma='3',rdnoise=rdnoise,gain=gain)
            print('\nOk. A master bias has been made: '+masterzero)
        print('='*70)
    else:
        masterzero=''
        print('\nOk, you have no bias files. Proceeding')


##########################################################################################################
    print('8. BIAS AND OVERSCAN CORRECTION\n')
    a=raw_input('Overscan and bias-subtraction of the flats, objs, and if DIS, comps. Do it (y/n)? ')
    if (a[:1]=='y') | (a==''):
        # set up a list of the input files (flats, comps, objs)
        if instr=='dis': infiles=np.concatenate((flatfiles,compfiles,objfiles),axis=0)
        if instr=='arctic': infiles=np.concatenate((flatfiles,objfiles),axis=0)

        infilesfile=reduction_dir+'inlist_'+genericlabel+'_ccdproc1'
        np.savetxt(infilesfile,infiles,fmt='%s')
        
        # We're making changes to the images now, so need to set up a list of output files
        outfiles=[]
        for infile in infiles:
            tmp=infile.split('/')
            tmp1=tmp[len(tmp)-1].split('.fits')
            outfiles.append(reduction_dir+tmp1[0]+'_cproc1.fits')
        outfiles=np.array(outfiles)
        outfilesfile=reduction_dir+'outlist_'+genericlabel+'_ccdproc1'
        np.savetxt(outfilesfile,outfiles,fmt='%s')
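        # First ccdproc pass: fit and subtract the overscan, trim to the data section,
        # and (optionally) subtract the master bias; flat fielding is left to a second pass.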

        iraf.ccdproc('@'+infilesfile,output='@'+outfilesfile,ccdtype='',fixpix=fixpix,oversca='yes',trim='yes',
                     zerocor=zcor,darkcor='no',flatcor='no',fixfile=badpixfile,biassec=biassec,trimsec=datasec,
                     zero=masterzero,interac='no',low_rej='3',high_re='3')
        print('\nOk. Overscan and bias subtracting done.')
    print('='*70)

##########################################################################################################
    print('9. MAKE MASTER FLAT\n')
    masterflat=reduction_dir+'Flat_'+genericlabel+'.fits'
    a=raw_input('Median combine the flats (y/n)? ')
    if (a[:1]=='y') | (a==''):
        procfiles=np.array(glob.glob(reduction_dir+'*_cproc1.fits'))
        imtyp=[]
        for i in range(len(procfiles)):
            head=fits.getheader(procfiles[i])
            imtyp.append(head['imagetyp'].lower())
        imtyp=np.array(imtyp)
        gd=np.where(imtyp=='flat')
        flats=procfiles[gd]

        iraf.imcombine(','.join(flats),output=masterflat,combine='median',reject='avsigclip',lsigma='2',
                       hsigma='2',rdnoise=rdnoise,gain=gain)
        print('\nOk. A master flat has been made: '+masterflat)
    print('='*70)

##########################################################################################################
    print('10. MAKE NORMALIZED MASTER FLAT\n')
    masternormflat=reduction_dir+'NormFlat_'+genericlabel+'.fits'
    a=raw_input('Normalize the response of the master flat (y/n)? ')
    if (a[:1]=='y') | (a==''):
        if instr=='arctic':
            im=fits.getdata(masterflat)
            imdim=len(im)
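            # mean level taken from a 100x100 patch at the high-index corner of the array
            # (the out-of-range upper bound of the slice is clamped by numpy)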
            meanval=np.mean(im[imdim-100:imdim+100,imdim-100:imdim+100])
            iraf.imarith(operand1=masterflat,op='/',operand2=meanval,result=masternormflat)
            print('\nOk. A normalized master flat has been created: '+masternormflat)
        if instr=='dis':
            iraf.response(calibrat=masterflat,normaliz=masterflat+'[*,400:500]',response=masternormflat,
                          interac='no',functio='spline3',order='5',low_rej='3',high_rej='3')
            print('\nOk. A normalized master flat has been created: '+masternormflat)
    print('='*70)

##########################################################################################################
    print('11. FLAT FIELD THE IMAGES\n')

    a=raw_input('Flat field the objs & comps by the normalized master flat (y/n)? ')
    if (a[:1]=='y') | (a==''):
        tmpfiles=np.array(glob.glob(reduction_dir+'*_cproc1.fits'))
        imtyp=[]
        for i in range(len(tmpfiles)):
            head=fits.getheader(tmpfiles[i])
            imtyp.append(head['imagetyp'].lower())
        imtyp=np.array(imtyp)
        gd1=np.where(imtyp=='object')
        if instr=='dis':
            gd2=np.where(imtyp=='comp')
            infiles=np.concatenate((tmpfiles[gd1],tmpfiles[gd2]),axis=0)
        if instr=='arctic':
            infiles=tmpfiles[gd1]
        infilesfile=reduction_dir+'inlist_'+genericlabel+'_ccdproc2'
        np.savetxt(infilesfile,infiles,fmt='%s')

        outfiles=[]
        for infile in infiles: 
            outfiles.append(infile.replace('cproc1','cproc2'))
        outfiles=np.array(outfiles)
        outfilesfile=reduction_dir+'outlist_'+genericlabel+'_ccdproc2'
        np.savetxt(outfilesfile,outfiles,fmt='%s')

        iraf.ccdproc('@'+infilesfile,output='@'+outfilesfile,ccdtype='',fixpix='no',oversca='no',trim='no',
                     zerocor='no',darkcor='no',flatcor='yes',fixfile='',biassec=biassec,trimsec=datasec,
                     flat=masternormflat,interac='no',low_rej='3',high_re='3')
        print('\nOk. The objs and comps have been flat fielded.')
    print('='*70)


##########################################################################################################
    if instr=='dis':
        print('Next steps (done manually for now):')
        print('='*70)
        print('12. Extract apertures via APALL.')
        print('13. Run IDENTIFY on one or more comps.')
        print('14. (optional) Run REIDENTIFY to transfer solution among comps.')
        print('15. Assign solution(s) to objs using REFSPECTRA.')
        print('16. Run DISPCOR on the objs to apply the wavelength solution.')
        print('='*70)
        print('Other things that should be done:')
        print('='*70)
        print('step ?1. flux calibration.')
        print('step ?2. sky/background subtraction.')
        print('step ?3. heliocentric velocity correction.')


    return imtyp
示例#52
0
def makeillumination(lista,flatfield):#,outputfile,illum_frame):
    import os,glob,string,re
    from astropy.io import fits as pyfits
    import ntt
    from ntt.util import readhdr, readkey3, delete, display_image, defsex,  name_duplicate, correctcard
    from numpy import compress, array, argmax, argmin, min, argsort, float32
    import datetime
    MJDtoday = 55927 + (datetime.date.today() - datetime.date(2012, 01, 01)).days
    _date = readkey3(readhdr(lista[0]), 'date-night')
    _filter = readkey3(readhdr(lista[0]), 'filter')
    illum_frame = name_duplicate(
        lista[0], 'illum_' + _date + '_' + _filter + '_' + str(MJDtoday), '')
    from pyraf import iraf
    iraf.images(_doprint=0)
    iraf.imutil(_doprint=0)
    iraf.utilities(_doprint=0)
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    iraf.digiphot(_doprint=0)
    iraf.daophot(_doprint=0)
    iraf.generic(_doprint=0)
    toforget = ['digiphot.daophot', 'imutil.imarith',
                'image', 'utilities.surfit']
    for t in toforget:
        iraf.unlearn(t)
    n = len(lista)
#   start loop to read image names from the input file
    lista1 = []
    iraf.ccdred.verbose = 'no'
    ff = open('templist.lst', 'w')
    for i in range(0, len(lista)):
        ff.write('C' + lista[i] + '\n')
        delete('C' + lista[i])
        delete('C' + re.sub('.fits', '_sub.fits', lista[i]))
        ntt.sofiphotredudef.crosstalk(lista[i], 'C' + lista[i])
        iraf.noao.imred.ccdred.ccdproc('C' + lista[i], output='', overscan="no", trim="yes", ccdtype='', darkcor='no', fixpix='no', zerocor="no", flatcor='yes',
                                       illumco='no', trimsec='[1:1024,1:1007]', biassec='', flat=flatfield, illum='')
        correctcard('C' + lista[i])
        lista1.append('C' + lista[i])
    ff.close()
    print '\n### prereducing STD frames to compute illumination correction ........'
    lista2, skyfile = ntt.sofiphotredudef.skysub(lista1, readkey3(
        readhdr(lista1[0]), 'ron'), readkey3(readhdr(lista1[0]), 'gain'), True)
    lista2 = ntt.sofiphotredudef.sortbyJD(lista2)
    print '\n### use x on the star and q  to continue....'
    display_image(lista2[0], 2, '', '', False)
    delete('tmpone.coo')
    iraf.image.tv.imexamine(lista2[0], 2, logfile='tmpone.coo',
                            keeplog='yes', xformat='', yformat='', wcs='logical')
    iraf.tvmark(2, 'tmpone.coo', mark="circle", number='yes',
                label='no', radii=8, nxoffse=5, nyoffse=5, color=204, txsize=2)
    xycoo = iraf.proto.fields('tmpone.coo', '1,2', Stdout=1)
    x0, y0 = string.split(xycoo[0])
    x0 = float(x0)
    y0 = float(y0)
    xcum0 = readkey3(readhdr(lista2[0]), 'xcum')
    ycum0 = readkey3(readhdr(lista2[0]), 'ycum')
    iraf.digiphot(_doprint=0)
    iraf.daophot(_doprint=0)
    iraf.noao.digiphot.daophot.datapars.datamin = -1000
    iraf.noao.digiphot.daophot.datapars.datamax = 60000
    iraf.noao.digiphot.daophot.daopars.function = 'gauss'
    iraf.noao.digiphot.daophot.photpars.zmag = 0
    namesex = defsex('default.sex')
    for i in range(0, len(lista2)):
        j = i + 1
        xcum = readkey3(readhdr(lista2[i]), 'xcum')
        ycum = readkey3(readhdr(lista2[i]), 'ycum')
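        # predicted position of the reference star on this frame: the position marked on
        # the first frame shifted by the difference in cumulative offsets (XCUM/YCUM)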
        xx = x0 - xcum0 + xcum
        yy = y0 - ycum0 + ycum
        # sex objects
        os.system('sex ' + lista2[i] + ' -c ' + namesex + '>  _logsex')
        delete('_logsex')
        xpix = iraf.proto.fields('detections.cat', fields='2', Stdout=1)
        ypix = iraf.proto.fields('detections.cat', fields='3', Stdout=1)
        cm = iraf.proto.fields('detections.cat', fields='4', Stdout=1)
        cm = compress((array(xpix) != ''), array(cm, float))
        ypix = compress((array(xpix) != ''), array(ypix, float))
        xpix = compress((array(xpix) != ''), array(xpix, float))
        if len(xpix) > 300:
            num = 300
        else:
            num = len(xpix) - 1
        xpix = xpix[argsort(cm)][0:num]
        ypix = ypix[argsort(cm)][0:num]
        distance = (ypix - yy)**2 + (xpix - xx)**2
        xx1, yy1 = xpix[argmin(distance)], ypix[argmin(distance)]
        f = open('tmpone.coo', 'w')
        f.write(str(xx1) + ' ' + str(yy1) + '\n')
        f.close()
        display_image(lista2[i], 1, '', '', False)
        iraf.tvmark(1, 'tmpone.coo', mark="circle", number='yes',
                    label='no', radii=8, nxoffse=5, nyoffse=5, color=204, txsize=2)
        answ = 'n'
        while answ != 'y':
            answ = raw_input('is the selected star the right one [[y]/n] ? ')
            if not answ:
                answ = 'y'
            if answ in ['y', 'YES', 'yes', 'Y']:
                print lista2[i]
                delete('pippo.' + str(j) + '.mag')
                gggg = iraf.digiphot.daophot.phot(
                    lista2[i], "tmpone.coo", output="pippo." + str(j) + ".mag", verify='no', interac='no', Stdout=1)
                try:
                    float(string.split(gggg[0])[3])
                    answ = 'y'
                except:
                    print '\n### warning'
                    answ = 'n'
            else:
                print '\n### select the std star'
                display_image(lista2[i], 1, '', '', False)
                iraf.image.tv.imexamine(lista2[
                                        i], 1, logfile='tmpone.coo', keeplog='yes', xformat='', yformat='', wcs='logical')
                xycoo = iraf.proto.fields('tmpone.coo', '1,2', Stdout=1)
                x2, y2 = string.split(xycoo[0])
                f = open('tmpone.coo', 'w')
                f.write(str(x2) + ' ' + str(y2) + '\n')
                f.close()
                delete('pippo.' + str(j) + '.mag')
                print '###### new selection ' + str(x2), str(y2)
                gggg = iraf.digiphot.daophot.phot(
                    lista2[i], "tmpone.coo", output='pippo.' + str(j) + '.mag', verify='no', interac='no', Stdout=1)
                try:
                    float(string.split(gggg[0])[3])
                    answ = 'y'
                except:
                    print '\n### warning'
                    answ = 'n'

    os.system('ls pippo.*.mag > tempmag.lst')
    tmptbl0 = iraf.txdump(textfile="@tempmag.lst",
                          fields="XCENTER,YCENTER,FLUX", expr='yes', Stdout=1)
    ff = open('magnitudini', 'w')
    for i in tmptbl0:
        ff.write(i + '\n')
    ff.close()
#   delete the temporary images and files
    delete("temp*.fits")
    delete('temp*.lst')
    delete(illum_frame)
    print '\n### fitting the illumination surface...'
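    # fit a 2nd-order 2-D polynomial (full cross terms) to the measured star fluxes across
    # the dither pattern and write it out as a 1024x1024 image, normalized just below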
    aaa = iraf.utilities.surfit('magnitudini', image=illum_frame, function="polynomial",
                                xorder=2, yorder=2, xterms="full", ncols=1024, nlines=1024, Stdout=1)
    iraf.noao.imred.generic.normalize(illum_frame)
    correctcard(lista[0])
    data, hdr = pyfits.getdata(illum_frame, 0, header=True)
    data0, hdr0 = pyfits.getdata(lista[0], 0, header=True)
    delete(illum_frame)
    pyfits.writeto(illum_frame, float32(data), hdr0)
    flatfield0 = string.split(flatfield, '/')[-1]
    ntt.util.updateheader(
        illum_frame, 0, {'MKILLUM': [flatfield0, 'flat field']})
    display_image(illum_frame, 1, '', '', False)
    for i in range(0, len(lista)):  # in lista:
        img = lista[i]
        delete('pippo.' + str(i) + '.mag')
        delete('C' + img)
        delete('C' + re.sub('.fits', '_sky.fits', img))
#    delete('C*.fits.mag.1')
#    iraf.hedit(illum_frame,'MKILLUM','Illum. corr. created '+flatfield,add='yes',update='yes',verify='no')
    return illum_frame
示例#53
0
File: rcred.py  Project: scizen9/kpy
def create_masterflat(flatdir=None, biasdir=None, channel='rc'):
    '''
    Creates a master flat from both dome flats and sky flats, keeping only frames whose counts in the
    given filter are neither saturated nor too low (90th-percentile counts between 4000 and 40000).
    '''
    
    
    if (flatdir == None or flatdir==""): flatdir = "."
        
    if (biasdir == None or biasdir==""): biasdir = "."
        
    os.chdir(flatdir)
    
    if (len(glob.glob("Flat_%s*norm.fits"%channel)) == 4):
        logger.info( "Master Flat exists!")
        return 
    if (len(glob.glob("Flat_%s*norm.fits"%channel)) > 0):
        logger.info( "Some Master Flat exist!")
        return 
    else:
        logger.info( "Starting the Master Flat creation!")

    bias_slow = "Bias_%s_slow.fits"%channel
    bias_fast = "Bias_%s_fast.fits"%channel
    
    if (not os.path.isfile(bias_slow) and not os.path.isfile(bias_fast) ):
        create_masterbias(biasdir)
     
    lsflat = []
    lfflat = []
    
    obj = ""
    imtype = ""
    
    #Select all filts that are Flats with same instrument
    for f in glob.glob(channel+"*fits"):
        try:
            if fitsutils.has_par(f, "OBJECT"):
                obj = str.upper(fitsutils.get_par(f, "OBJECT"))
            else:
                continue

            if fitsutils.has_par(f, "IMGTYPE"):
                imtype = str.upper(fitsutils.get_par(f, "IMGTYPE"))
            else:
                continue
        
            #if ("RAINBOW CAM" in str.upper(fitsutils.get_par(f, "CAM_NAME")) and  ("DOME" in  obj or "FLAT" in obj or "Twilight" in obj or "TWILIGHT" in imtype or "DOME" in imtype)):
            if ( "TWILIGHT" in imtype):

                if (fitsutils.get_par(f, "ADCSPEED")==2):
                    lfflat.append(f)
                else:
                    lsflat.append(f)
        except:
            logger.error( "Error with retrieving parameters for file %s"% f)
            pass
                
    logger.info( "Files for slow flat %s"% lsflat)
    logger.info( "Files for fast flat %s"% lfflat)
    
    fsfile ="lflat_slow_"+channel
    np.savetxt(fsfile, np.array(lsflat), fmt="%s")
    fffile ="lflat_fast_"+channel
    np.savetxt(fffile, np.array(lfflat), fmt="%s")


    # Running IRAF
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    
    #Remove bias from the flat
    if len(lsflat) >0:
        iraf.imarith("@"+fsfile, "-", bias_slow, "b_@"+fsfile)
    
    if len(lfflat) >0:
        iraf.imarith("@"+fffile, "-", bias_fast, "b_@"+fffile)    
    
    #Remove the list files
    os.remove(fsfile)
    os.remove(fffile)
    
    #Slices the flats.
    debiased_flats = glob.glob("b_*.fits")
    for f in debiased_flats:
        logger.info( "Slicing file %s"% f)
        try:
            slice_rc(f)
        except:
            logger.error("Error when slicing file... deleting the unsliced one...")
        #Remove the un-sliced file
        os.remove(f)
        
    #Selects the ones that are suitable given the number of counts and combines them.
    bands = ['u', 'g', 'r', 'i']
    for b in bands:
        out = "Flat_%s_%s.fits"%(channel, b)
        out_norm = out.replace(".fits","_norm.fits")

        if (os.path.isfile(out_norm)):
            logger.error( "Master Flat for filter %s exists. Skipping..."%b)
            continue
        
        lfiles = []
        for f in glob.glob('b_*_%s.fits'%b):
            d = fits.open(f)[0].data
            if np.percentile(d, 90)>4000 and np.percentile(d, 90)<40000:
                lfiles.append(f)

        if len(lfiles) == 0:
            logger.error( "WARNING!!! Could not find suitable flats for band %s"%b)
            continue
        if len(lfiles) < 3:
            logger.error( "WARNING!!! Could find less than 3 flats for band %s. Skipping, as it is not reliable..."%b)
            continue
        ffile ="lflat_"+b
        np.savetxt(ffile, np.array(lfiles), fmt="%s")
    
        
        #Cleaning of old files
        if(os.path.isfile(out)): os.remove(out)
        if(os.path.isfile(out_norm)): os.remove(out_norm)
        if(os.path.isfile("Flat_stats")): os.remove("Flat_stats")
        
        
        #Combine flats
        iraf.imcombine(input = "@"+ffile, \
                        output = out, \
                        combine = "median",\
                        scale = "mode",
                        weight = "exposure")
        iraf.imstat(out, fields="image,npix,mean,stddev,min,max,mode", Stdout="Flat_stats")
        st = np.genfromtxt("Flat_stats", names=True, dtype=None)
        #Normalize flats
        iraf.imarith(out, "/", st["MODE"], out_norm)
        
        #Do some cleaning
        logger.info( 'Removing intermediate de-biased flat files')
        for f in glob.glob('b_*_%s.fits'%b):
            os.remove(f)

        os.remove(ffile)
        
        
        if os.path.isfile(fsfile):
            os.remove(fsfile)
        if os.path.isfile(fffile):
            os.remove(fffile)

        #copy into the reference folder with current date
        newdir = os.path.join("../../refphot/", os.path.basename(os.path.abspath(flatdir)))
        if (not os.path.isdir(newdir)):
            os.makedirs(newdir)
        shutil.copy(out_norm, os.path.join(newdir, os.path.basename(out_norm)) )   
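
A minimal usage sketch for the routine above, assuming rcred.py and its helpers (fitsutils, slice_rc, create_masterbias, the module logger) are importable; the directory names are placeholders, not part of the original code.

# Hedged usage sketch (not from the original module); paths are placeholders.
from rcred import create_masterflat

create_masterflat(flatdir="raw_20230101", biasdir="raw_20230101", channel="rc")
# create_masterbias() is invoked internally when Bias_rc_slow.fits / Bias_rc_fast.fits
# are missing. On success, Flat_rc_<band>_norm.fits files are left in flatdir and
# copied into ../../refphot/<night>/ for later reuse.
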
Example #54
0
import sys
import os
import glob

glob = glob.glob

# Load the IRAF packages we'll need
try:
    current_dir = os.getcwd()
    if os.getlogin() == "Arthur":
        os.chdir("/Users/Arthur/Ureka/iraf/local")
    from pyraf import iraf

    os.chdir(current_dir)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
except Exception as e:
    print "Failure: could not find pyraf/iraf"
    sys.exit(1)


def make_lists(raw_dir):

    all_raw = glob("{}/*.fits".format(raw_dir))
    all_ot = ["{}_ot.fits".format(os.path.basename(i).split(".fits")[0]) for i in all_raw]
    comps = [i for i in all_ot if "comp" in i]
    darks = [i for i in all_ot if "dark" in i]
    flats = [i for i in all_ot if "dflat" in i]
    zero = [i for i in all_ot if "zero" in i]
    all_otz = [i for i in all_ot if "zero" not in i]
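
The snippet above is truncated before anything is returned; as a self-contained illustration of the same name-based filtering pattern (the file names below are invented):

# Stand-alone illustration of the filtering pattern; file names are invented.
sample = ["comp01_ot.fits", "dark01_ot.fits", "dflat01_ot.fits",
          "zero01_ot.fits", "obj01_ot.fits"]
comps = [f for f in sample if "comp" in f]
zeros = [f for f in sample if "zero" in f]
all_otz = [f for f in sample if "zero" not in f]   # everything except the biases
print(comps)
print(zeros)
print(all_otz)
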
Example #55
0
import os
folderroot = '/Users/lucaizzo/Documents/NOT/test/'
os.chdir(folderroot)
import numpy as np
from astropy.io import fits
from matplotlib import pyplot as plt
import shutil

import sys
from pyraf import iraf
iraf.noao(_doprint=0)
iraf.imred(_doprint=0)
iraf.ccdred(_doprint=0)
iraf.twodspec(_doprint=0)
iraf.longslit(_doprint=0)
iraf.kpnoslit(_doprint=0)
iraf.astutil(_doprint=0)
iraf.onedspec(_doprint=0)
iraf.twodspec.longslit.dispaxis = 2

#read object keywords
for file in os.listdir(os.getcwd()):
    if file.endswith('.fits'):
        testfile = file


hduo = fits.open(testfile)

#name targets (science & standard)
target = hduo[0].header['OBJECT']
#target2 = 'SP0644p375'
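
A hedged variant of the OBJECT lookup above that tolerates a missing keyword; the file name is a placeholder.

# Sketch only: read OBJECT with a fallback instead of raising KeyError.
from astropy.io import fits

with fits.open("ALDc080243.fits") as hdul:      # placeholder file name
    target = hdul[0].header.get("OBJECT", "UNKNOWN")
print(target)
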
Example #56
0
File: rcred.py Project: scizen9/kpy
def create_superflat(imdir, filters=["u", "g", "r", "i"]):
    #Locate images for each filter
    imlist = glob.glob("rc*fits")
    
  
    #Run sextractor to locate bright sources
  
    sexfiles = sextractor.run_sex(imlist, overwrite=False)
    maskfiles = []
    
    for i, im in enumerate(imlist): 
        #Create a mask and store it in the mask directory
        maskfile = mask_stars(im, sexfiles[i])        
        maskfiles.append(maskfile)
        fitsutils.update_par(im, "BPM", os.path.relpath(maskfile))
        
    
        
    for filt in filters:
        fimlist = [im for im in imlist if fitsutils.get_par(im, "FILTER") == filt]
        fmasklist = [im for im in maskfiles if fitsutils.get_par(im, "FILTER") == filt]
        
        if len(fimlist) == 0:
            continue
        
        fsfile ="lflat_%s"%filt
        msfile = "lmask_%s"%filt
        np.savetxt(fsfile, np.array(fimlist), fmt="%s")
        np.savetxt(msfile, np.array(fmasklist), fmt="%s")
        
        
        '''masklist = []
        
        for m in fmasklist:
            hdulist = fits.open(m)
            data = hdulist[0].data
            masklist.append(data)
            
            
        masklist = np.array(masklist)
        
        hdu = fits.PrimaryHDU(masklist)
        hdulist = fits.HDUList([hdu])
        hdulist.writeto("mastermask_%s.fits"%filt)'''           
                
        # Running IRAF
        iraf.noao(_doprint=0)
        iraf.imred(_doprint=0)
        iraf.ccdred(_doprint=0)
        
        iraf.imarith("@"+fsfile, "*", "@"+msfile, "m_@"+fsfile)


        #Combine flats
        iraf.imcombine(input = "m_@"+fsfile, \
                        output = "superflat_%s.fits"%filt, \
                        combine = "median",\
                        scale = "mode", \
                        masktype="badvalue",\
                        maskvalue = 0)
                        
        iraf.imstat("superflat_%s.fits"%filt, fields="image,npix,mean,stddev,min,max,mode", Stdout="Flat_stats")
        time.sleep(0.1)
        st = np.genfromtxt("Flat_stats", names=True, dtype=None)
        #Normalize flats
        iraf.imarith("superflat_%s.fits"%filt, "/", st["MODE"], "superflat_%s_norm.fits"%filt)
Example #57
0
File: rcred.py Project: nblago/kpy
def create_masterbias(biasdir=None, channel='rc'):
    '''
    Combines slow and fast readout mode biases for the specified channel.
    '''
    
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    
    if (biasdir == None) or biasdir=="": biasdir = "."
        
    outs = "Bias_%s_slow.fits"%channel
    outf = "Bias_%s_fast.fits"%channel

    if (os.path.isfile(os.path.join(biasdir,outs)) and os.path.isfile(os.path.join(biasdir,outf))):
        print "Master Bias exists!"
        return
    else:
        print "Starting the Master Bias creation!"

    os.chdir(biasdir)        
        
    lfastbias = []
    lslowbias = []
    
    #Select all files that are bias frames taken with the same instrument
    for f in glob.glob("*fits"):
        try:
            if (channel == fitsutils.get_par(f, "CHANNEL") and "BIAS" in str.upper(fitsutils.get_par(f, "OBJECT")) ):
                if (fitsutils.get_par(f, "ADCSPEED")==2):
                    lfastbias.append(f)
                else:
                    lslowbias.append(f)
        except:
            pass
                
    print "Files for bias SLOW mode: ", lslowbias
    print "Files for bias FAST mode: ", lfastbias
    
    if len(lfastbias) > 0:
        bfile_fast ="lbias_fast_"+channel
        np.savetxt(bfile_fast, np.array(lfastbias), fmt="%s")
        if (os.path.isfile("Bias_stats_fast")): os.remove("Bias_stats_fast")
        iraf.imstat("@"+bfile_fast, Stdout="Bias_stats_fast")
        
        st = np.genfromtxt("Bias_stats_fast", names=True, dtype=None)
        print st
        
        iraf.imcombine(input = "@"+bfile_fast, \
                    output = outf, \
                    combine = "median",\
                    scale = "mode")
        os.remove(bfile_fast)

    if len(lslowbias) > 0:

        bfile_slow ="lbias_slow_"+channel
        np.savetxt(bfile_slow, np.array(lslowbias), fmt="%s")
        if (os.path.isfile("Bias_stats_slow")): os.remove("Bias_stats_slow")
        iraf.imstat("@"+bfile_slow, Stdout="Bias_stats_slow")
        
        st = np.genfromtxt("Bias_stats_slow", names=True, dtype=None)
        print st
        
        iraf.imcombine(input = "@"+bfile_slow, \
                    output = outs, \
                    combine = "median",\
                    scale = "mode")
        os.remove(bfile_slow)
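
The imstat-to-genfromtxt pattern used above (and in several other snippets here) can be exercised on its own; a small sketch, assuming pyraf is installed and bias_demo.fits is a placeholder image.

# Sketch of the imstat -> Stdout file -> genfromtxt pattern; image name is a placeholder.
import numpy as np
from pyraf import iraf

iraf.imstat("bias_demo.fits", fields="image,npix,mean,stddev,min,max,mode",
            Stdout="Bias_stats_demo")
st = np.genfromtxt("Bias_stats_demo", names=True, dtype=None)
print(st["MODE"])   # field names come from imstat's header row
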
Example #58
0
File: rcred.py Project: scizen9/kpy
def create_masterbias(biasdir=None, channel='rc'):
    '''
    Combines slow and fast readout mode biases for the specified channel.
    '''
    
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    
    if (biasdir is None) or biasdir=="": biasdir = "."
        
    outs = "Bias_%s_slow.fits"%channel
    outf = "Bias_%s_fast.fits"%channel

    doslow = True
    dofast = True
    if (os.path.isfile(os.path.join(biasdir,outs))): 
        logger.warn( "%s master Bias exists!"%outs)
        doslow = False
    if ( os.path.isfile(os.path.join(biasdir,outf))):
        logger.warn("%s master Bias exists!"%outs)
        dofast = False
    if(doslow or dofast):
        logger.info("Starting the Master Bias creation!")
    else:
        return

    os.chdir(biasdir)        
        
    lfastbias = []
    lslowbias = []
    
    #Select all files that are bias frames taken with the same instrument
    for f in glob.glob("rc*fits"):
        try:
            if ( "BIAS" in str.upper(fitsutils.get_par(f, "IMGTYPE")) ):
                if (fitsutils.get_par(f, "ADCSPEED")==2):
                    lfastbias.append(f)
                else:
                    lslowbias.append(f)
        except:
            pass
                
    logger.info("Files for bias SLOW mode: %s"% lslowbias)
    logger.info( "Files for bias FAST mode: %s"% lfastbias)
    
    if len(lfastbias) > 0 and dofast:
        bfile_fast ="lbias_fast_"+channel
        np.savetxt(bfile_fast, np.array(lfastbias), fmt="%s")
        if (os.path.isfile("Bias_stats_fast")): os.remove("Bias_stats_fast")
        iraf.imstat("@"+bfile_fast, Stdout="Bias_stats_fast")
        
        st = np.genfromtxt("Bias_stats_fast", names=True, dtype=None)
        logger.info("%s"%st)
        
        iraf.imcombine(input = "@"+bfile_fast, \
                    output = outf, \
                    combine = "median",\
                    scale = "mode")
        os.remove(bfile_fast)
        
        #copy into the reference folder with current date
        newdir = os.path.join("../../refphot/", os.path.basename(os.path.abspath(biasdir)))
        if (not os.path.isdir(newdir)):
            os.makedirs(newdir)
        shutil.copy(outf, os.path.join(newdir, os.path.basename(outf)) )
    else:
        copy_ref_calib(biasdir, outf)


    if len(lslowbias) > 0 and doslow:

        bfile_slow ="lbias_slow_"+channel
        np.savetxt(bfile_slow, np.array(lslowbias), fmt="%s")
        if (os.path.isfile("Bias_stats_slow")): os.remove("Bias_stats_slow")
        iraf.imstat("@"+bfile_slow, Stdout="Bias_stats_slow")
        
        st = np.genfromtxt("Bias_stats_slow", names=True, dtype=None)
        logger.info("%s"%st)
        
        iraf.imcombine(input = "@"+bfile_slow, \
                    output = outs, \
                    combine = "median",\
                    scale = "mode")
        os.remove(bfile_slow)
        
        #copy into the reference folder with current date
        newdir = os.path.join("../../refphot/", os.path.basename(os.path.abspath(biasdir)))
        if (not os.path.isdir(newdir)):
            os.makedirs(newdir)
        shutil.copy(outs, os.path.join(newdir, os.path.basename(outs)) )  
    else:
        copy_ref_calib(biasdir, outs)
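
A hypothetical call for this variant, assuming rcred.py, fitsutils and copy_ref_calib are importable; the directory name is a placeholder.

# Hedged usage sketch (not from the original module); path is a placeholder.
from rcred import create_masterbias

create_masterbias(biasdir="raw_20230101", channel="rc")
# Produces Bias_rc_slow.fits / Bias_rc_fast.fits (or falls back to copy_ref_calib
# when no raw biases are found) and mirrors them into ../../refphot/<night>/.
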
Example #59
0
File: rcred.py Project: nblago/kpy
def reduce_image(img, flatdir=None, biasdir=None, cosmic=True, astrometry=True, channel='rc', target_dir='reduced'):
    '''
    Applies Flat field and bias calibrations to the image.
    
    Steps:
    
    1. - Solve astrometry on the entire image.
    2. - Compute master bias and de-bias the image.
    3. - Separate the image into 4 filters.
    4. - Compute flat field for each filter and apply flat fielding on the image.
    5. - Computes cosmic ray rejection on the entire image.
    6. - Compute zeropoint for each image and store in a log file.
    7. - Plot zeropoint for the night.
    '''
    
    print "Reducing image ", img    

    objectname = fitsutils.get_par(img, "OBJECT").replace(" ", "").replace("]","").replace("[", "")

    print "For object", objectname
    
    #Change to image directory
    mydir = os.path.dirname(img)
    if mydir=="": mydir = "."
    mydir = os.path.abspath(mydir)
    os.chdir(mydir)
    #Create destination directory
    if (not os.path.isdir(target_dir)):
        os.makedirs(target_dir)

    #Rename to the image name only
    img = os.path.basename(img)


    if (astrometry):
        print "Solving astometry for the whole image..."
        img = solve_astrometry(img)
        astro = "a_"
    else:
        astro = ""
        
    
    #Compute BIAS
    if (biasdir == None or biasdir==""): biasdir = "."
    create_masterbias(biasdir)
    
    bias_slow = os.path.join(biasdir, "Bias_%s_%s.fits"%(channel, 'slow'))
    bias_fast = os.path.join(biasdir, "Bias_%s_%s.fits"%(channel, 'fast'))
    
    # Running IRAF to DE-BIAS
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    
    #Compute flat field
    if (flatdir == None or flatdir==""): flatdir = "."
    create_masterflat(flatdir, biasdir)
    
    #New names for the object.
    debiased = "b_" + astro + img
    print "Creating debiased file, ",debiased
    
    if (not os.path.isfile(bias_slow) or not os.path.isfile(bias_fast)):
        print "Master bias not found!"
        return

    #Debias
    if (fitsutils.get_par(img, "ADCSPEED")==2):
        iraf.imarith(img, "-", bias_fast, debiased)
        fitsutils.update_par(debiased, "BIASFILE", bias_fast)
        fitsutils.update_par(debiased, "RDNOISE", 20.)

    else:
        iraf.imarith(img, "-", bias_slow, debiased)
        fitsutils.update_par(debiased, "BIASFILE", bias_slow)
        fitsutils.update_par(debiased, "RDNOISE", 4.)

    #Set negative counts to zero
    hdu = pf.open(debiased)
    header = hdu[0].header
    hdu[0].data[hdu[0].data<0] = 0
    hdu.writeto(debiased, clobber=True)

    #Slicing the image for flats  
    slice_names = slice_rc(debiased)

    
    #Remove un-sliced image
    os.remove(debiased)

    # DE-flat each filter and store under object name
    for i, debiased_f in enumerate(slice_names):
        b = fitsutils.get_par(debiased_f, 'filter')
        
        deflatted = "f_b_" + astro + objectname + "_%s.fits"%b

        #Flat to be used for that filter
        flat = os.path.join(flatdir, "Flat_%s_%s_norm.fits"%(channel, b))

        if (not os.path.isfile(flat)):
            print "Master flat not found in", flat
            return
        #Cleans the deflatted file if exists
        if (os.path.isfile(deflatted)):
            os.remove(deflatted)
            
        iraf.imarith(debiased_f, "/", flat, deflatted)
        
        #Removes the de-biased file
        os.remove(debiased_f)
        
        print "Updating header with original filename and flat field used."
        fitsutils.update_par(deflatted, "ORIGFILE", img)
        fitsutils.update_par(deflatted, "FLATFILE", flat)

        slice_names[i] = deflatted
            
            
    if (cosmic):
        print "Correcting for cosmic rays..."
        # Correct for cosmics each filter
        for i, deflatted in enumerate(slice_names):
            cclean = "c_" +name
            clean_cosmic(os.path.join(os.path.abspath(mydir), deflatted), cclean)
            slice_names[i] = cclean
           
    #Moving files to the target directory
    for name in slice_names:
        if (os.path.isfile(name)):
            shutil.move(name, os.path.join(target_dir, name))
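
A hedged usage sketch for the nblago/kpy reduce_image above; the file and directory names are placeholders, and rcred.py with its helpers (solve_astrometry, slice_rc, clean_cosmic, fitsutils) is assumed importable.

# Hedged usage sketch (not from the original module); names are placeholders.
from rcred import reduce_image

reduce_image("rc20150101_10_30_00.fits", flatdir="calib", biasdir="calib",
             cosmic=True, astrometry=False, channel="rc", target_dir="reduced")
# Expected result (per the code above): one de-biased, flat-fielded frame per
# filter moved into the 'reduced' directory.
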
Example #60
0
File: rcred.py Project: scizen9/kpy
def reduce_image(image, flatdir=None, biasdir=None, cosmic=False, astrometry=True, channel='rc', target_dir='reduced', overwrite=False):
    '''
    Applies Flat field and bias calibrations to the image.
    
    Steps:
    
    1. - Solve astrometry on the entire image.
    2. - Computes cosmic ray rejection on the entire image.
    3. - Compute master bias (if it does not exist) and de-bias the image.
    4. - Separate the image into 4 filters.
    5. - Compute flat field for each filter (if it does not exist) and apply flat fielding on the image.
    6. - Compute the image zeropoint.

    '''
    
    logger.info("Reducing image %s"% image)    

    print "Reducing image ", image    

    
    image = os.path.abspath(image)
    imname = os.path.basename(image).replace(".fits", "")
    try:
        objectname = fitsutils.get_par(image, "NAME").replace(" ","")+"_"+fitsutils.get_par(image, "FILTER")
    except:
        logger.error( "ERROR, image "+ image + " does not have a NAME or a FILTER!!!")
        return

    print "For object", objectname
    logger.info( "For object %s"% objectname)


    
    
    #Change to image directory
    mydir = os.path.dirname(image)
    if mydir=="": mydir = "."
    mydir = os.path.abspath(mydir)
    os.chdir(mydir)
    #Create destination directory
    if (not os.path.isdir(target_dir)):
        os.makedirs(target_dir)

    #If we don't want to overwrite the already extracted images, we check whether they exist.
    if (not overwrite):
        existing = True
        for band in ['u', 'g', 'r', 'i']:
            destfile = os.path.join(target_dir, imname + "_f_b_a_%s_%s_0.fits"%(objectname, band))
            logger.info( "Looking if file %s exists: %s"%( destfile, \
                (os.path.isfile(destfile) ) ) )
            existing = existing and (os.path.isfile( destfile ) )
        if existing:
            return []


        
    #Initialize the basic parameters.
    init_header_reduced(image)

    astro = ""
    if (astrometry):
        logger.info( "Solving astometry for the whole image...")
        img = solve_astrometry(image)
        if (os.path.isfile(img)):
            astro="a_"
            fitsutils.update_par(img, "IQWCS", 1)
        else:
            logger.error( "ASTROMETRY DID NOT SOLVE ON IMAGE %s"% image)
            img = image

    #Update noise parameters needed for cosmic ray rejection
    if (fitsutils.get_par(img, "ADCSPEED")==2):
        fitsutils.update_par(img, "RDNOISE", 20.)
    else:
        fitsutils.update_par(img, "RDNOISE", 4.)

    if (cosmic):
        logger.info( "Correcting for cosmic rays...")
        # Correct for cosmics each filter
        cleanimg = clean_cosmic(os.path.join(os.path.abspath(mydir), img))
        img = cleanimg
    
    
    
    #Get basic statistics for the image
    nsrc, fwhm, ellip, bkg = sextractor.get_image_pars(img)
    
    logger.info( "Sextractor statistics: nscr %d, fwhm (pixel) %.2f, ellipticity %.2f"% (nsrc, fwhm, ellip))
    print "Sextractor statistics: nscr %d, fwhm (pixel) %.2f, ellipticity %.2f"% (nsrc, fwhm, ellip)

    
    dic = {"SEEPIX": fwhm/0.394, "NSRC":nsrc, "ELLIP":ellip}
    #Update the seeing information from sextractor
    fitsutils.update_pars(img, dic)
    
    
    #Compute BIAS
    if (biasdir is None or biasdir==""): biasdir = "."
    create_masterbias(biasdir)
    
    bias_slow = os.path.join(biasdir, "Bias_%s_%s.fits"%(channel, 'slow'))
    bias_fast = os.path.join(biasdir, "Bias_%s_%s.fits"%(channel, 'fast'))
    
    # Running IRAF to DE-BIAS
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    
    #Compute flat field
    if (flatdir is None or flatdir==""): flatdir = "."
    create_masterflat(flatdir, biasdir)
    
    #New names for the object.
    debiased = os.path.join(os.path.dirname(img), "b_" + os.path.basename(img))
    logger.info( "Creating debiased file, %s"%debiased)
    
    if ( (fitsutils.get_par(img, "ADCSPEED")==0.1 and not os.path.isfile(bias_slow)) \
        or (fitsutils.get_par(img, "ADCSPEED")==2 and not os.path.isfile(bias_fast)) ):
        logger.warn( "Master bias not found! Tryting to copy from reference folder...")
        copy_ref_calib(mydir, "Bias")
        if ( (fitsutils.get_par(img, "ADCSPEED")==0.1 and not os.path.isfile(bias_slow)) \
        or (fitsutils.get_par(img, "ADCSPEED")==2 and not os.path.isfile(bias_fast)) ):
            logger.error( "Bias not found in reference folder")
            return


    #Clean first
    if (os.path.isfile(debiased)):
        os.remove(debiased)
        
    #Debias
    if (fitsutils.get_par(img, "ADCSPEED")==2):
        iraf.imarith(img, "-", bias_fast, debiased)
        fitsutils.update_par(debiased, "BIASFILE", bias_fast)
        fitsutils.update_par(debiased, "RDNOISE", 20.)

    else:
        iraf.imarith(img, "-", bias_slow, debiased)
        fitsutils.update_par(debiased, "BIASFILE", bias_slow)
        fitsutils.update_par(debiased, "RDNOISE", 4.)

    #Set negative counts to zero
    hdu = fits.open(debiased)
    header = hdu[0].header
    hdu[0].data[hdu[0].data<0] = 0
    hdu.writeto(debiased, clobber=True)

    #Slicing the image for flats  
    slice_names = slice_rc(debiased)
    print "Creating sliced files, ", slice_names

    
    #Remove un-sliced image
    os.remove(debiased)

    # DE-flat each filter and store under object name
    for i, debiased_f in enumerate(slice_names):
        b = fitsutils.get_par(debiased_f, 'filter')
        
        deflatted = os.path.join(os.path.dirname(image), target_dir, imname + "_f_b_" + astro + objectname + "_%s.fits"%b)

        #Flat to be used for that filter
        flat = os.path.join(flatdir, "Flat_%s_%s_norm.fits"%(channel, b))

        if (not os.path.isfile(flat)):
            logger.warn( "Master flat not found in %s"% flat)
            copy_ref_calib(mydir, "Flat_%s_%s_norm"%(channel, b))
            continue
        else:
            logger.info( "Using flat %s"%flat)
            
        #Cleans the deflatted file if exists
        if (os.path.isfile(deflatted)):
            os.remove(deflatted)

        if (os.path.isfile(debiased_f) and os.path.isfile(flat)):
            logger.info( "Storing de-flatted %s as %s"%(debiased_f, deflatted))
            time.sleep(1)
            iraf.imarith(debiased_f, "/", flat, deflatted)
        else:
            logger.error( "SOMETHING IS WRONG. Error when dividing %s by the flat field %s!"%(debiased_f, flat))
        
        #Removes the de-biased file
        os.remove(debiased_f)
        
        logger.info( "Updating header with original filename and flat field used.")
        fitsutils.update_par(deflatted, "ORIGFILE", os.path.basename(image))
        fitsutils.update_par(deflatted, "FLATFILE", flat)

        slice_names[i] = deflatted
            
                
    #The de-flatted slices were already written into the target directory; record their sky background
    for image in slice_names:
        bkg = get_median_bkg(image)
        fitsutils.update_par(image, "SKYBKG", bkg)
        #shutil.move(name, newname)

        
    #Compute the zeropoints
    for image in slice_names:
        zeropoint.calibrate_zeropoint(image)
        
    return slice_names
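
A hedged sketch that runs the scizen9/kpy reduce_image above over a whole night; the glob pattern and directories are placeholders, and rcred.py plus its helpers (fitsutils, sextractor, zeropoint, copy_ref_calib) are assumed importable.

# Hedged usage sketch (not from the original module); pattern and paths are placeholders.
import glob
from rcred import reduce_image

for raw in sorted(glob.glob("rc2015*.fits")):
    slices = reduce_image(raw, flatdir=".", biasdir=".", cosmic=False,
                          astrometry=True, channel="rc", target_dir="reduced",
                          overwrite=False)
    print(slices)   # per-band, flat-fielded slices with zeropoints computed
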