Example No. 1
def continuum(spec_in, spec_out, order=1, interactive=True):
    """
    Continuum normalize spectra.

    Parameters
    ----------

    spec_in: str;
        Name of the input spectra.

    spec_out: str;
        Name of the output spectra.

    order: int or str;
        The order of the polynomial or the number of spline pieces.

    interactive: bool;
        Perform the fit interactively using the icfit commands?
    """
    iraf.onedspec(_doprint=0)
    iraf.onedspec.continuum.unlearn()
    iraf.onedspec.continuum.input = spec_in
    iraf.onedspec.continuum.output = spec_out
    iraf.onedspec.continuum.order = str(order)
    if not interactive:
        iraf.onedspec.continuum.interactive = 'no'
    iraf.onedspec.continuum(mode='h')
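
A minimal usage sketch (the file names are hypothetical; PyRAF and the iraf object must already be imported, as assumed by the function above):

continuum('spec.fits', 'spec_norm.fits', order=3, interactive=False)  # writes the continuum-normalized spectrum to spec_norm.fits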
Example No. 2
def scopy(spec_in, spec_out, wstart, wend):
    """
    Cut a spectrum between the given wavelengths.

    Parameters
    ----------

    spec_in: str;
        Name of input spectrum.

    spec_out: str;
        Name of output spectrum.

    wstart: int;
        Beginning wavelength.

    wend: int;
        Ending wavelength.

    """
    iraf.onedspec(_doprint=0)
    iraf.scopy.w1 = wstart
    iraf.scopy.w2 = wend
    iraf.scopy.input = spec_in
    iraf.scopy.output = spec_out
    iraf.scopy(mode='h')
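
For example, to trim a hypothetical spectrum to the 4000-7000 Angstrom range (wavelengths in the units of the spectrum's dispersion solution):

scopy('spec.fits', 'spec_cut.fits', 4000, 7000)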
Example No. 3
def standard(imlist_name, obj, obs):
    """
    standard -- Add standard stars to sensitivity file
    standard input [records] output
    standard wfcdbstd.ms.fits (no) "sao"

    extinct = "/iraf/iraf/noao/lib/onedstds/iidscal/feige56.dat"

    caldir  = "/iraf/iraf/noao/lib/onedstds/iidscal/"
    """
    import glob
    import os, sys
    from pyraf import iraf
    iraf.noao()
    iraf.onedspec()
    imlist = glob.glob(imlist_name)
    imlist.sort()
    for i in range(len(imlist)):
        inim = imlist[i]
        iraf.standard(
            input=inim,
            output='s' + inim[:-5],
            extinct='/iraf/iraf/noao/lib/onedstds/iidscal/feige56.dat',
            observatory=obs,
            caldir="/iraf/iraf/noao/lib/onedstds/iidscal/",
            star_name=obj)
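
A hypothetical call, assuming wavelength-calibrated multispec frames matching wfcdbstd*.ms.fits, the standard-star name 'sao', and an observatory code such as 'ctio':

standard('wfcdbstd*.ms.fits', obj='sao', obs='ctio')  # writes one 's'-prefixed STANDARD output per input frame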
Example No. 4
def run_scombine(listin, fn):
    namefil = []
    namelist = open(listin, "r")
    for cols in (raw.strip().split() for raw in namelist):
        namefil.append(cols[0])
    namelist.close()
    obs = []
    count = -1
    for i in range(len(namefil)):
        if namefil[i - 1][0:2] != namefil[i][0:2]:
            obs.append([])
            count += 1
        obs[count].append(namefil[i])
    spec = []
    for i in range(len(obs[0])):
        spec.append([])
        for j in range(len(obs)):
            spec[i].append(obs[j][i])
    iraf.noao()
    iraf.onedspec()
    for i in range(len(spec)):
        temp = open('temp_list', 'w')
        for j in range(len(spec[i])):
            temp.write(spec[i][j] + '\n')
        temp.close()
        if (spec[i][0][-6] == 'L') or (spec[i][0][-6] == 'U'):
            name_index = '0_' + spec[i][0][-6]
        else:
            name_index = spec[i][0][-6] + '_' + spec[i][0][-8]
        # NOTE: fibnum and ob_id are assumed to be defined at module level elsewhere
        if len(fibnum) != 0:
            newname = ob_id[fibnum.index(name_index[0])] + '_' + name_index[-1]
            name_index = newname
        iraf.scombine('@temp_list', name_index, logfile='combine_log')
        fn.append(name_index)
Example No. 5
def thar_cal(object_b_fn_ec, object_b_fn_ec_w, colour):
# Import IRAF modules:
  iraf.noao(_doprint=0)
  iraf.onedspec(_doprint=0)
# Check input file and reference extraction exist before proceeding:
  if os.path.isfile(object_b_fn_ec) == True:
# Perform dispersion correction:
    iraf.dispcor(input=object_b_fn_ec, output=object_b_fn_ec_w)
    print ' '
    print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
    print colour.capitalize() + ' object spectrum             '
    print '(' + str(object_b_fn_ec) + ')'
    print 'successfully wavelength calibrated in file         '
    print str(object_b_fn_ec_w) + '.'
    print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
    print ' '
  else:
    print ' '
    print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
    print 'Input frame                                        ' 
    print str(object_b_fn_ec)
    print 'does not exist. Exiting script.                    '
    print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
    print ' '
    print ' '
    sys.exit()
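
A hypothetical call for the blue arm, assuming a reference arc has already been assigned to the input spectrum (e.g., with refspectra):

thar_cal('obj_blue.ec.fits', 'obj_blue.ec.w.fits', 'blue')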
Example No. 6
    def StandardTask(self, InputFile, OutputFile, FitsFolder, airmass_value, exptime_value):
   
        iraf.noao(_doprint=0)
        iraf.onedspec(_doprint=0)               
        
        #From the fits file determine which is the star being treated
        StarName = self.StarnameFromFileName(InputFile)
        
        #Get the corresponding Calibration file
        CalibrationFile, Bandwidth, Bandsep   = self.getCalibrationFile(StarName)
        
        #In case no output name is given, we generate one with the provided prefix (the default format is a_std_wolf.dat)
        if OutputFile == None:
            OutputFile    = 'a_std_' + StarName
        
        #Prepare dictionary with the configuration for the tasks           
        Standard_conf_Comb  = self.StandardAttributes(InputFile, OutputFile, CalibrationFile, FitsFolder, airmass_value, exptime_value, Bandwidth, Bandsep)

        #Display the equivalent command in IRAF
        Command = self.printIrafCommand('standard', Standard_conf_Comb)
        print '--- Using the command'
        print Command
        
        #Run the task        
        iraf.onedspec.standard(**Standard_conf_Comb)
        
        return OutputFile 
Example No. 7
def apall_config(
    config_file,
    config_default='/Users/ando/andry/research/Make_software/saltrss/apall_conf.yml'
):
    config = yaml.safe_load(open(config_default))
    custom_config = {}
    if os.path.exists(config_file):
        custom_config = yaml.safe_load(open(config_file))

    config = config_merge(custom_config, config)

    sections = [
        'iraf.apall',
    ]
    iraf.noao(_doprint=0)
    iraf.twodspec(_doprint=0)
    iraf.onedspec(_doprint=0)
    iraf.apextract(_doprint=0)
    iraf.apextract.unlearn()
    iraf.apall.unlearn()
    iraf.apsum.unlearn()
    iraf.apextract.setParam('dispaxis', 1)

    for section_name in sections:
        params = config[section_name].items()
        for param_id in params:
            eval(section_name).setParam(param_id[0], param_id[1])
    return config
Example No. 8
def classifyfast(fitsfile, program='snid'):
    import floyds
    import re, os, sys, string
    from numpy import array, argsort
    from pyraf import iraf

    iraf.onedspec(_doprint=0)
    imgasci = re.sub('.fits', '.asci', fitsfile)
    floyds.util.delete(imgasci)
    iraf.onedspec.wspectext(fitsfile + '[*,1,1]', imgasci, header='no')

    if program == 'snid':
        print '\n######################\nclassify with snid\n'
        os.system('snid plot=0 iquery=0 inter=0 verbose=0 ' + imgasci)
        f = open(re.sub('.asci', '_snid.output', imgasci), 'r')
        ss = f.readlines()
        f.close()
        ss = ss[ss.index('### type fraction/redshift/age ###\n') + 2:ss.index(
            '### rlap-ordered template listings ###\n') - 2]
        bb = {}
        for i in ss:
            if string.split(i)[0] in ['Ia', 'Ia-norm', 'Ia-91T', 'Ia-91bg', 'Ia-csm', 'Ia-pec', 'Ib', 'Ib-norm',
                                      'Ib-pec', 'IIb', 'Ic', 'Ic-norm', 'Ic-pec', 'Ic-broad', 'II', 'II-pec', 'IIn',
                                      'IIP', 'IIL', 'NotSN', 'AGN', 'GAL', 'LBV']:
                bb[string.split(i)[0]] = {'all': string.split(i)[1:], 'frac': string.split(i)[2],
                                          'phase': string.split(i)[6], 'red': string.split(i)[3]}
        _type, _frac, _phase = [], [], []
        for ii in argsort(array([bb[i]['frac'] for i in bb.keys()], float))[::-1]:
            _type.append(bb.keys()[ii])
            _frac.append(float(bb[bb.keys()[ii]]['frac']))
            _phase.append(float(bb[bb.keys()[ii]]['phase']))
    elif program == 'superfit':
        print 'classify with superfit'
    elif program == 'gelato':
        print 'gelato'
    else:
        print 'warning: program not found'
    trigger = False
    if _type[0] not in ['AGN', 'NotSN', 'Gal']:
        if _phase[0] <= 0:
            trigger = True
        else:
            if 'pec' in _type[0]:
                trigger = True
            elif _type[0] in ['Ia-csm']:
                trigger = True
    print '\n#########################\n'
    print '\n   Type        %        phase   (most probable)'
    print '%7s\t%7s\t%7s' % (str(_type[0]), str(_frac[0]), str(_phase[0]))
    print '\n   Type        %        phase    (second most probable)'
    print '%7s\t%7s\t%7s' % (str(_type[1]), str(_frac[1]), str(_phase[1]))
    if trigger:
        print '\n##################\n INTERESTING SN !!!!\n ACTIVATE FOLLOW-UP WITH FULL LCOGT NETWORK !!!!\n\n'
    else:
        print '\n##################\n BORING SN ...... \n\n'
    #     print '\n We report that a spectrum of '+fitsfile+' was obtained robotically on Aug XX with the FLOYDS spectrograph '+
    #     ' at "Faulkes Telescope XXX". The spectrum (range 320-1000 nm) shows it to be a SN Ia roughly one week before maximum light, and is consistent with the host galaxy (CGCG 425-26) redshift of z=0.027. Classification was performed via supernova spectrum cross correlation using SNID (Blondin & Tonry, 2007, ApJ, 666, 1024).'
    return _type, _frac, _phase
Example No. 10
def spectraresolution3(img0, ww=25):
    import pyfits
    import os, string, re, sys
    import lickshane
    from numpy import arange, mean, compress, array, median
    from numpy import interp as ninterp
    from pyraf import iraf

    iraf.onedspec(_doprint=0)

    img = re.sub('arc_', '', img0)
    data, hdr = pyfits.getdata(img0, 0, header=True)
    crvals = lickshane.util.readkey3(hdr, 'CRVAL1')
    cds = lickshane.util.readkey3(hdr, 'CD1_1')
    xx = arange(len(data))
    yy = data
    aa = crvals + (xx) * cds

    maxtab, mintab = lickshane.util.peakdet(yy, median(yy) * 10)
    if len(maxtab) <= 0:
        maxtab, mintab = lickshane.util.peakdet(yy, median(yy))
    if len(maxtab) <= 0:
        lines = []
    else:
        lines0, b = zip(*maxtab)
        lines = crvals + (array(lines0)) * cds
    if len(lines) > 0:
        lines = compress((aa[0] < array(lines)) & (array(lines) < aa[-1]),
                         array(lines))
        cursor = ''
        yym = ninterp(lines - ww, aa, yy)
        yyp = ninterp(lines + ww, aa, yy)
        for i in range(0, len(lines)):
            cursor = cursor + str(lines[i] - ww) + '  ' + str(
                yym[i]) + '  1   k\n'
            cursor = cursor + str(lines[i] + ww) + '  ' + str(
                yyp[i]) + '  1   k\n'
        cursor = cursor + str(lines[i] + ww) + '  ' + str(yyp[i]) + '  1   q\n'
        ff = open('_cursor', 'w')
        ff.write(cursor)
        ff.close()
        from pyraf import iraf

        aaa = iraf.noao.onedspec.bplot(img0,
                                       cursor='_cursor',
                                       spec2='',
                                       new_ima='',
                                       overwri='yes',
                                       Stdout=1)
        fw = []
        for i in aaa[1:]:
            fw.append(float(string.split(string.split(i, '=')[-1], 'k')[0]))
        lickshane.util.delete('_cursor')
        res = (aa[0] + ((aa[-1] - aa[0]) / 2)) / mean(fw)
    else:
        res = 9999
    return res
Example No. 11
def normalize(fn):
    iraf.noao()
    iraf.onedspec()
    for i in range(len(fn)):
        if fn[i][-1] == 'L':
            nord = '10'
        else:
            nord = '7'
        iraf.continuum(fn[i], fn[i], order=nord, ask='no', logfile='norm_log')
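
A usage sketch with hypothetical spectrum names such as those collected by run_scombine above:

normalize(['3_L', '3_a'])  # order-10 continuum fit for names ending in 'L', order-7 otherwise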
Example No. 12
def spectraresolution2(img0, ww=25):
    import pyfits
    import os, string, re, sys
    import floyds
    from numpy import arange, mean, compress, array
    from numpy import interp as ninterp
    from pyraf import iraf

    iraf.onedspec(_doprint=0)

    id = 'database/id' + re.sub('.fits', '', img0)
    img = re.sub('arc_', '', img0)
    data, hdr = pyfits.getdata(img0, 0, header=True)
    crvals = floyds.util.readkey3(hdr, 'CRVAL1')
    cds = floyds.util.readkey3(hdr, 'CD1_1')
    xx = arange(len(data))
    yy = data
    aa = crvals + (xx) * cds
    #   read identified lines from id file
    f = open(id, 'r')
    ss = f.readlines()
    f.close()
    indices = [i for i, x in enumerate(ss) if "begin" in x]
    if len(indices) <= 1:
        dd = ss[indices[0]:len(ss)]
    else:
        dd = ss[indices[0]:indices[1]]
    #         dd=ss[indices[-1]:len(ss)]
    start = [i for i, x in enumerate(dd) if "features" in x][0] + 1
    stop = [i for i, x in enumerate(dd) if "function" in x][0]
    ff = dd[start:stop]
    lines = []
    if len(ff) > 0:
        for i in ff:    lines.append(float(string.split(i)[2]))
        print lines
        lines = compress((aa[0] < array(lines)) & (array(lines) < aa[-1]), array(lines))
        cursor = ''
        yym = ninterp(lines - ww, aa, yy)
        yyp = ninterp(lines + ww, aa, yy)
        for i in range(0, len(lines)):
            cursor = cursor + str(lines[i] - ww) + '  ' + str(yym[i]) + '  1   k\n'
            cursor = cursor + str(lines[i] + ww) + '  ' + str(yyp[i]) + '  1   k\n'
        cursor = cursor + str(lines[i] + ww) + '  ' + str(yyp[i]) + '  1   q\n'
        ff = open('_cursor', 'w')
        ff.write(cursor)
        ff.close()
        from pyraf import iraf

        aaa = iraf.noao.onedspec.bplot(img0, cursor='_cursor', spec2='', new_ima='', overwri='yes', Stdout=1)
        fw = []
        for i in aaa[1:]: fw.append(float(string.split(string.split(i, '=')[-1], 'k')[0]))
        floyds.util.delete('_cursor')
        res = (aa[0] + ((aa[-1] - aa[0]) / 2)) / mean(fw)
    else:
        res = 9999
    return res
Example No. 13
def spectraresolution2(img0, ww=25):
    from astropy.io import fits
    import os, string, re, sys
    import floyds
    from numpy import arange, mean, compress, array
    from numpy import interp as ninterp
    from pyraf import iraf

    iraf.onedspec(_doprint=0)

    id = 'database/id' + re.sub('.fits', '', img0)
    img = re.sub('arc_', '', img0)
    data, hdr = fits.getdata(img0, 0, header=True)
    crvals = floyds.util.readkey3(hdr, 'CRVAL1')
    cds = floyds.util.readkey3(hdr, 'CD1_1')
    xx = arange(len(data))
    yy = data
    aa = crvals + (xx) * cds
    #   read identified lines from id file
    f = open(id, 'r')
    ss = f.readlines()
    f.close()
    indices = [i for i, x in enumerate(ss) if "begin" in x]
    if len(indices) <= 1:
        dd = ss[indices[0]:len(ss)]
    else:
        dd = ss[indices[0]:indices[1]]
    #         dd=ss[indices[-1]:len(ss)]
    start = [i for i, x in enumerate(dd) if "features" in x][0] + 1
    stop = [i for i, x in enumerate(dd) if "function" in x][0]
    ff = dd[start:stop]
    lines = []
    if len(ff) > 0:
        for i in ff:    lines.append(float(string.split(i)[2]))
        print lines
        lines = compress((aa[0] < array(lines)) & (array(lines) < aa[-1]), array(lines))
        cursor = ''
        yym = ninterp(lines - ww, aa, yy)
        yyp = ninterp(lines + ww, aa, yy)
        for i in range(0, len(lines)):
            cursor = cursor + str(lines[i] - ww) + '  ' + str(yym[i]) + '  1   k\n'
            cursor = cursor + str(lines[i] + ww) + '  ' + str(yyp[i]) + '  1   k\n'
        cursor = cursor + str(lines[i] + ww) + '  ' + str(yyp[i]) + '  1   q\n'
        ff = open('_cursor', 'w')
        ff.write(cursor)
        ff.close()
        from pyraf import iraf

        aaa = iraf.noao.onedspec.bplot(img0, cursor='_cursor', spec2='', new_ima='', overwri='yes', Stdout=1)
        fw = []
        for i in aaa[1:]: fw.append(float(string.split(string.split(i, '=')[-1], 'k')[0]))
        floyds.util.delete('_cursor')
        res = (aa[0] + ((aa[-1] - aa[0]) / 2)) / mean(fw)
    else:
        res = 9999
    return res
Example No. 14
def normalize(setup, filelist):
    iraf.noao()
    iraf.onedspec()
    for i in (filelist):
        name = setup + '/' + i
        iraf.continuum(name,
                       name,
                       order=1,
                       ask='no',
                       logfile=setup + '/norm_log')
Example No. 15
def standard_sens(infile, caldir=')_.caldir', noext=False, overwrite=False):
    print('\n#############################')
    print('Deriving sensitivity function')

    # Suppress the package listings printed while loading IRAF packages
    sys.stdout = open('/dev/null', 'w')
    iraf.noao()
    iraf.onedspec()
    sys.stdout = sys.__stdout__  # Back to the standard output

    basename = fits.getval(infile, 'FRAMEID')
    std = basename + '.std'

    if noext:
        extinction = ''
    else:
        extinction = fi.filibdir + 'mkoextinct.dat'

    if os.path.isfile(std) and not overwrite:
        print('\t The output file of IRAF STANDARD '+\
              'task already exists. '+std)
        print('\t STANDARD is skipped.')
    else:
        if overwrite:
            print('Removing ' + std)
            try:
                os.remove(std)
            except:
                pass

        iraf.standard(infile, std, extinction=extinction,\
                      caldir=caldir, beam_sw='no', aperture='')
        print('Output file of IRAF STANDARD task: ' + std)

    sens = basename + '.sens.fits'
    if os.path.isfile(sens) and not overwrite:
        print('\t The output file of IRAF SENSFUNC task already exists. ' +
              sens)
        print('\t SENSFUNC is skipped.')
    else:
        if overwrite:
            print('Removing ' + sens)
            try:
                os.remove(sens)
            except:
                pass

        iraf.sensfunc(std, sens, aperture='', ignoreaps='yes',\
                      extinction=extinction,\
                      logfile='sensfunc.log')
        print('Output file of IRAF SENSFUNC task: ' + sens)

    return sens, True
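
A hypothetical call, assuming 'std.fits' carries a FRAMEID keyword and that the module-level fits, iraf, and fi objects used above are available:

sens_file, ok = standard_sens('std.fits', overwrite=True)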
Example No. 16
def wavelength_calibration(targetdir):

    """
    Does wavelength calibration.

    Writes every fit to the database, so make sure it is using the correct one.

    This needs to be run in the object directory so that the database is written there.

    """

    print 'Target directory is ' + targetdir
    print 'Doing wavelength calibration...'

    if os.getcwd() != targetdir:

        print 'Warning: current working directory must be target directory!'

        return None

    iraf.noao(_doprint=0)
    iraf.onedspec(_doprint=0)

    iraf.unlearn('identify')

    iraf.identify.setParam('images','aimcomb.fits')
    iraf.identify.setParam('coordli','/home/lc585/Dropbox/IoA/WHT_Proposal_2015a/argon+xenon.dat')
    iraf.identify.setParam('niterat',1)
    iraf.identify.setParam('function','spline3')
    iraf.identify.setParam('order',3)
    iraf.identify.setParam('zwidth',200.0) #  Zoom graph width in user units
    iraf.identify.setParam('database','database')

    iraf.identify()

    # Update fits header

    print '\n' '\n' '\n'
    print 'Updating fits header...'

    iraf.hedit.setParam('images','imcomb.ms.fits')
    iraf.hedit.setParam('fields','REFSPEC1')
    iraf.hedit.setParam('value','aimcomb.fits') # should be wavelength calibrated?
    iraf.hedit.setParam('add','yes')
    iraf.hedit.setParam('verify','yes')
    iraf.hedit.setParam('show','yes')

    iraf.hedit()

    return None
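
A usage sketch (the target directory is hypothetical; it must also be the current working directory, as the function checks):

import os
targetdir = '/data/wht/targets/J1234+5678'  # hypothetical path
os.chdir(targetdir)
wavelength_calibration(targetdir)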
Example No. 17
def thar_ref(object_b_fn_ec, wave_ref, colour):
# Import IRAF modules:
  iraf.noao(_doprint=0)
  iraf.onedspec(_doprint=0)
#  iraf.echelle(_doprint=0)
  parList = "wavelength_calibration_refspectra.par"
  if os.path.isfile(object_b_fn_ec) == True:
    if os.path.isfile(wave_ref) == True:
      if os.path.isfile(parList) == True:
# Assign reference spectra to input object spectra using IRAF task refspectra:
        iraf.refspectra.setParList(ParList="wavelength_calibration_refspectra.par")
        iraf.refspectra(input=object_b_fn_ec, references=wave_ref)
        print ' '
        print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
        print colour.capitalize() + ' object spectrum             '
        print '(' + str(object_b_fn_ec) + ')                      '
        print 'assigned to ' + str(colour) + ' reference spectrum '
        print '(' + str(wave_ref) + ') successfully.              ' 
        print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
        print ' '
      else:
        print ' '
        print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
        print 'Wavelength reference IRAF .par file                ' 
        print str(parList)
        print 'does not exist. Exiting script.                    '
        print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
        print ' '
        print ' '
        sys.exit()
    else:
      print ' '
      print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
      print 'ThAr reference frame                               ' 
      print str(wave_ref)
      print 'does not exist. Exiting script.                    '
      print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
      print ' '
      print ' '
      sys.exit()
  else:
    print ' '
    print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
    print 'Input frame                                        ' 
    print str(object_b_fn_ec)
    print 'does not exist. Exiting script.                    '
    print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
    print ' '
    print ' '
    sys.exit()
Example No. 18
def sensfunc(standards, obs):
    """
    sensfunc -- Determine sensitivity and extinction functions
    sensfunc standards sensitivity
    """
    import glob
    import os, sys
    from pyraf import iraf
    iraf.noao()
    iraf.onedspec()
    output_sens = "sens"
    iraf.sensfunc(standards=standards,
                  sensitivity=output_sens,
                  extinct='/iraf/iraf/noao/lib/onedstds/ctioextinct.dat',
                  observatory=obs)
    iraf.splot(output_sens)
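
A hypothetical call, where 'std_all' is the output file previously written by the IRAF STANDARD task:

sensfunc('std_all', 'ctio')  # derives the 'sens' sensitivity image and displays it with splot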
Example No. 19
def spectraresolution3(img0, ww=25):
    import pyfits
    import os, string, re, sys
    import floyds
    from numpy import arange, mean, compress, array, median
    from numpy import interp as ninterp
    from pyraf import iraf

    iraf.onedspec(_doprint=0)

    img = re.sub('arc_', '', img0)
    data, hdr = pyfits.getdata(img0, 0, header=True)
    crvals = floyds.util.readkey3(hdr, 'CRVAL1')
    cds = floyds.util.readkey3(hdr, 'CD1_1')
    xx = arange(len(data))
    yy = data
    aa = crvals + (xx) * cds

    maxtab, mintab = floyds.util.peakdet(yy, median(yy) * 10)
    if len(maxtab) <= 0:      maxtab, mintab = floyds.util.peakdet(yy, median(yy))
    if len(maxtab) <= 0:
        lines = []
    else:
        lines0, b = zip(*maxtab)
        lines = crvals + (array(lines0)) * cds
    if len(lines) > 0:
        lines = compress((aa[0] < array(lines)) & (array(lines) < aa[-1]), array(lines))
        cursor = ''
        yym = ninterp(lines - ww, aa, yy)
        yyp = ninterp(lines + ww, aa, yy)
        for i in range(0, len(lines)):
            cursor = cursor + str(lines[i] - ww) + '  ' + str(yym[i]) + '  1   k\n'
            cursor = cursor + str(lines[i] + ww) + '  ' + str(yyp[i]) + '  1   k\n'
        cursor = cursor + str(lines[i] + ww) + '  ' + str(yyp[i]) + '  1   q\n'
        ff = open('_cursor', 'w')
        ff.write(cursor)
        ff.close()
        from pyraf import iraf

        aaa = iraf.noao.onedspec.bplot(img0, cursor='_cursor', spec2='', new_ima='', overwri='yes', Stdout=1)
        fw = []
        for i in aaa[1:]: fw.append(float(string.split(string.split(i, '=')[-1], 'k')[0]))
        floyds.util.delete('_cursor')
        res = (aa[0] + ((aa[-1] - aa[0]) / 2)) / mean(fw)
    else:
        res = 9999
    return res
Example No. 20
def atmofile(imgstd, imgout=''):
    # print "LOGX:: Entering `atmofile` method/function in %(__file__)s" %
    # globals()
    from pyraf import iraf
    import os
    import ntt

    iraf.noao(_doprint=0)
    iraf.onedspec(_doprint=0)
    iraf.set(direc=ntt.__path__[0] + '/')
    _cursor = 'direc$standard/ident/cursor_sky_0'
    if not imgout:
        imgout = 'atmo_' + imgstd
    os.system('rm -rf ' + imgout)
    iraf.noao.onedspec.bplot(imgstd, cursor=_cursor,
                             spec2=imgstd, new_ima=imgout, overwri='yes')
    return imgout
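
A usage sketch with a hypothetical standard-star spectrum (assumes the ntt package and its bundled sky cursor file are installed):

telluric_img = atmofile('std_star.fits')  # returns 'atmo_std_star.fits'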
Example No. 21
def get_wl_identy(filename, coordlistname):
    iraf.onedspec()
    print 'run get_wl_identy identify...'
    print 'the input file is ' + filename
    print 'implot %s' % filename
    iraf.implot(image = filename)
    print 'run identify...'
    iraf.identify(images = filename
            , section = 'middle line', database = 'database'
            , coordlist = coordlistname, units = '', nsum = 10
            , match = -3.0, maxfeatures = 50, zwidth = 100.0
            , ftype = 'emission', fwidth = 18.0, cradius = 5.0
            , threshold = 0.0, minsep = 2.0, function = 'spline3'
            , order = 1, sample = '*', niterate = 0
            , low_reject = 3.0, high_reject = 3.0, grow = 0.0
            , autowrite = False, graphics = 'stdgraph', cursor = ''
            , crval = '', cdelt = '')
Example No. 22
    def inspect(self, event):
        # Pull up the current TDSS spectrum in IRAF SPLOT
        global TDSS_fiber_indices, TDSS_fiber_index
        global plate, mjd

        # Load packages; splot is in the onedspec package, which is in noao.
        # The special keyword _doprint=0 turns off displaying the tasks
        # when loading a package.
        iraf.noao(_doprint=0)
        iraf.onedspec(_doprint=0)

        # Set/view IRAF task parameters.
        #iraf.onedspec.splot.save_file = "splot_%s.log" % (root,)

        # Call the IRAF task and specify some parameters.
        iraf.onedspec.splot('spPlate-' + str(plate) + '-' + str(mjd) +
                            '.fits[0][*,' + str(TDSS_fiber_index + 1) + ']')
Example No. 23
def run_scombine(listin, fn, setup):
    namefil = []
    namelist = open(listin, "r")
    for cols in (raw.strip().split() for raw in namelist):
        namefil.append(cols[0])
    namelist.close()
    obs = []
    count = -1
    for i in range(len(namefil)):
        if namefil[i - 1][namefil[i - 1].index('/') +
                          1:][0] != namefil[i][namefil[i].index('/') + 1:][0]:
            obs.append([])
            count += 1
        obs[count].append(namefil[i])
    spec = []
    for i in range(len(obs[0])):
        spec.append([])
        for j in range(len(obs)):
            Flag = True
            try:
                val = obs[j][i]
                Flag = True
            except IndexError:
                Flag = False
            if Flag:
                spec[i].append(obs[j][i])
        if len(spec[i]) != len(obs):
            spec.pop(i)
    iraf.noao()
    iraf.onedspec()
    for i in range(len(spec)):
        temp = open(setup + '/temp_list', 'w')
        for j in range(len(spec[i])):
            temp.write(spec[i][j] + '\n')
        temp.close()
        longn = spec[i][0]
        specname = longn[(longn.index('/') +
                          1):][(longn[(longn.index('/') + 1):]).index('/') +
                               1:]
        iraf.scombine('@' + setup + '/temp_list',
                      setup + '/' + specname,
                      logfile=setup + '/combine_log')
        fn.append(specname)
Example No. 24
def atmofile(imgstd, imgout=''):

    from pyraf import iraf
    import os
    import ntt

    iraf.noao(_doprint=0, Stdout=0)
    iraf.onedspec(_doprint=0, Stdout=0)
    iraf.set(direc=ntt.__path__[0] + '/')
    _cursor = 'direc$standard/ident/cursor_sky_0'
    if not imgout:
        imgout = 'atmo_' + imgstd
    os.system('rm -rf ' + imgout)
    iraf.noao.onedspec.bplot(imgstd,
                             cursor=_cursor,
                             spec2=imgstd,
                             new_ima=imgout,
                             overwri='yes')
    return imgout
Example No. 25
def calibrate(imlist_name, obs):
    """
    calibrate -- Apply extinction corrections and flux calibrations
    calibrate input output [records]
    """
    import glob
    import os, sys
    from pyraf import iraf
    iraf.noao()
    iraf.onedspec()
    imlist = glob.glob(imlist_name)
    imlist.sort()
    for i in range(len(imlist)):
        inim = imlist[i]
        #iraf.calibrate(input=inim , output='s'+inim, extinct='/iraf/iraf/noao/lib/onedstds/ctioextinct.dat', flux='yes', observatory=obs)
        iraf.calibrate(input=inim,
                       output='s' + inim,
                       extinct='yes',
                       flux='yes',
                       observatory=obs)
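
For example, to flux-calibrate every wavelength-calibrated frame matching a hypothetical pattern:

calibrate('wobj*.ms.fits', obs='ctio')  # writes 's'-prefixed, flux-calibrated copies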
Example No. 26
def atmofile(imgstd, imgout=''):
    # print "LOGX:: Entering `atmofile` method/function in %(__file__)s" %
    # globals()
    from pyraf import iraf
    import os
    import ntt

    iraf.noao(_doprint=0)
    iraf.onedspec(_doprint=0)
    iraf.set(direc=ntt.__path__[0] + '/')
    _cursor = 'direc$standard/ident/cursor_sky_0'
    if not imgout:
        imgout = 'atmo_' + imgstd
    os.system('rm -rf ' + imgout)
    iraf.noao.onedspec.bplot(imgstd,
                             cursor=_cursor,
                             spec2=imgstd,
                             new_ima=imgout,
                             overwri='yes')
    return imgout
Example No. 27
def test(directory='', fileTag=''):
    fluxFiles = sorted(glob.glob(directory+'/*_1_Flux.fits'))
    spectraCount = 0
    # Load the necessary IRAF packages
    iraf.onedspec()
    for thisFile in fluxFiles:
        # Resulting file names will use the self-reported object name
        # (if any) and an index based on the alphabetical order of
        # the original ("HI.xxxxx...") filename. Individual CCD files
        # (ie: HI.xxxx..._[1,2,3]_Flux.fits") will be left separate.
        objName = SD.GetObjectName(thisFile).strip().replace(' ','_')
        starName = '{0}{1}_{2:02d}'.format(fileTag, objName, spectraCount)
        spectraCount += 1
        
        # makee 'linearize' breaks a 2-d spectrum into its component
        # orders, one order per file.
        makeeCall = kMakeeLinearCall + thisFile
        sub.call(makeeCall, shell=True)
        orderFiles = sorted(glob.glob(directory+'/*_Flux-??.fits'))
        for order in orderFiles:
            orderNo = int(order[-7:-5])
            if starName == '':
                fileRoot = order.split('/')[-1]
                orderFileName = directory+fileRoot
            else:
                orderFileName = directory+'/{0}_{1:02d}.fits'.format(starName, orderNo)
            if orderNo>6:
                u.clearFile(order)
            else:
                os.system('mv {0} {1}'.format(order, orderFileName))
            # Continuum the orders
#            if int(orderNo)<5:
#                iraf.continuum(input=order, output=orderFileName, functio='chebyshev', interac='no', order=5, low_rej=3.0, high_re=3.5, niterat=10)
#            u.clearFile(order)
#        contFileName = directory+'/aC_'+starName+'.fits'
        # ...and stack them back up.
#        iraf.onedspec.scopy(input=directory+'/temp_*', output=contFileName, format='multispec', renumber="yes")
#        filesToClean = glob.glob(directory+'/temp_*')
#        for fn in filesToClean:
#            u.clearFile(fn)
    return
Example No. 28
def extract(packnam):
    obs = packnam[-6]
    pack = pyfits.open(packnam)
    table = pack[1].data
    head = pack[0].header
    setup = head['HIERARCH ESO INS EXP MODE']
    fib = []
    obj = []
    valid_obj = []
    for i in range(len(table)):
        fib.append(table[i][0])
        obj.append(table[i][7])
    iraf.noao()
    iraf.onedspec()
    os.makedirs(setup + '/' + obs)
    for k in range(len(fib)):
        if (obj[k] != 'CALSIM') and (obj[k][0:4] != 'Grid'):
            valid_obj.append(setup + '/' + obs + '/' + obj[k])
            iraf.scopy(packnam + '[' + str(fib[k]) + ',*]',
                       setup + '/' + obs + '/' + obj[k])
    return valid_obj, setup
Example No. 29
def wspectext(spec_in, ascii_out, header=False):
    """
    Convert 1D image spectra to an ascii text spectra.

    Parameters
    ----------

    spec_in: str;
        Name of spectrum to be converted.

    ascii_out: str;
        Name of output ascii file.

    header: bool;
        Set header on file if True.
    """
    iraf.onedspec(_doprint=0)
    iraf.onedspec.wspectext.unlearn()
    if header:
        iraf.onedspec.wspectext.header = 'yes'
    iraf.onedspec.wspectext.input = spec_in
    iraf.onedspec.wspectext.output = ascii_out
    iraf.onedspec.wspectext(mode='h')
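
A minimal usage sketch with hypothetical file names:

wspectext('spec.fits', 'spec.txt', header=False)  # two-column (wavelength, flux) ASCII output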
Example No. 30
def sflip(imlist_name):
    """
    Flip a spectrum when the wavelength direction of the arc image differs from that of the science image.
    A vertically oriented image is essentially unaffected by the flip, so it is enough to flip the 1-D spectrum extracted along the horizontal direction.

    sflip -- Flip data and/or dispersion coordinates in spectra
    sflip input output

    """
    import glob
    import os, sys
    from pyraf import iraf
    iraf.noao()
    iraf.onedspec()
    imlist = glob.glob(imlist_name)
    imlist.sort()
    for i in range(len(imlist)):
        inim = imlist[i]
        print('Flip spectrum')
        iraf.sflip(input=inim,
                   output='r' + inim,
                   coord_flip='yes',
                   data_flip='yes')
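
A hypothetical call that flips every matching 1-D spectrum:

sflip('wobj*.ms.fits')  # writes 'r'-prefixed copies with both data and dispersion flipped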
Example No. 31
def scopy_flux(flux_sci, flux_scopy_fits, flux_scopy_range, flux_scopy_file):
    """
    Combine (average) all spectra (according to bin) in the image for a given spectral range, calculate mean flux.
    This is used for 1 mag contours when plotting the velocity fields.
    INPUT: FLUX_SCI, FLUX_SCOPY_FITS, FLUX_SCOPY_RANGE
    OUTPUT: FLUX_SCOPY_FILE
    """

    if os.path.exists(flux_scopy_file):
        print('File {} already exists'.format(flux_scopy_file))
        return

    files_in_dir = glob.glob(flux_sci.format('*'))
    assert len(files_in_dir) > 0, 'No files match {}'.format(flux_sci.format('*'))

    from pyraf import iraf

    iraf.noao()
    iraf.onedspec()

    flux_scopy_fits_i_data_mean = []

    for i in range(len(files_in_dir)):

        flux_sci_i = flux_sci.format(i)
        flux_scopy_fits_i = flux_scopy_fits.format(i)

        if not os.path.exists(flux_scopy_fits_i):
            iraf.scopy(flux_sci_i, flux_scopy_fits_i, w1=flux_scopy_range[0], w2=flux_scopy_range[1])

        flux_scopy_fits_i_data = fits.getdata(flux_scopy_fits_i, 0)
        assert flux_scopy_fits_i_data.ndim != 0, "Scopy'd array is empty"

        flux_scopy_fits_i_data_mean.append(flux_scopy_fits_i_data.mean())

    np.array(flux_scopy_fits_i_data_mean).tofile(flux_scopy_file, sep='\n')
Example No. 32
def main():

    # Parse input arguments
    usage = 'usage: %prog [options] targets'
    p = optparse.OptionParser(usage=usage, version='v'+VERSION)
    p.add_option('--debug', action='store_true', help='toggle debug messages')
    p.add_option('--verbose', '-v', action='store_true', help='toggle on verbose mode')
    p.add_option('--tlist', '-l', action='store', type='string', dest='tlist', help='Target list')
    
    (options, args) = p.parse_args()
    
    if options.debug:
        options.verbose = True
        print 'options: ', options
        print 'args: ', args
    
    (validTargets, invalidTarget) = getTargets(args, options.tlist)
    specdict = getSpectra(validTargets)
    
    onedspec()
    for target in validTargets:
        print target
        for spectrum in specdict[target]:
            onedspec.splot(spectrum)
Example No. 33
def main():

    description = "> Performs pre-reduction steps"
    usage = "%prog    \t [option] \n Recommended syntax: %prog -i -c"

    parser = OptionParser(usage=usage, description=description, version="0.1")
    option, args = parser.parse_args()

    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    iraf.twodspec(_doprint=0)
    iraf.longslit(_doprint=0)
    iraf.onedspec(_doprint=0)
    iraf.specred(_doprint=0)

    iraf.ccdred.verbose = 'no'
    iraf.specred.verbose = 'no'
    iraf.ccdproc.darkcor = 'no'
    iraf.ccdproc.fixpix = 'no'
    iraf.ccdproc.flatcor = 'no'
    iraf.ccdproc.zerocor = 'no'
    iraf.ccdproc.ccdtype = ''

    iraf.longslit.mode = 'h'
    iraf.specred.mode = 'h'
    iraf.noao.mode = 'h'
    iraf.ccdred.instrument = "ccddb$kpno/camera.dat"

    mkarc = raw_input("Make arc? ([y]/n): ")
    mkflat = raw_input("Make flat? ([y]/n): ")

    if len(args) > 1:
        files = []
        sys.argv.append('--help')
        option, args = parser.parse_args()
        sys.exit()
    elif len(args) == 1:
        files = util.readlist(args[0])
        sys.exit()
    else:
        listfile = glob.glob('*.fits')
        files_science = []
        files_arc = []
        files_dflat = []
        #print 'checking your files ...'
        for img in listfile:
            _type = ''
            hdr0 = util.readhdr(img)
            _type = util.readkey3(hdr0, 'object')
            if 'flat' in _type.lower():
                files_dflat.append(img)
            elif 'arc' not in _type.lower() and 'arc' not in img.lower():
                files_science.append(img)
        if mkarc != 'n':
            mkarc_b = raw_input(
                "List blue arc files to combine (.fits will be added): "
            ).split()
            mkarc_r = raw_input(
                "List red arc files to combine (.fits will be added): ").split(
                )
            for arc in mkarc_b:
                files_arc.append(arc + '.fits')
            for arc in mkarc_r:
                files_arc.append(arc + '.fits')

    if mkarc != 'n':
        list_arc_b = []
        list_arc_r = []
        for arcs in files_arc:
            if instruments.blue_or_red(arcs)[0] == 'blue':
                list_arc_b.append(arcs)
            elif instruments.blue_or_red(arcs)[0] == 'red':
                list_arc_r.append(arcs)
            else:
                sys.exit()

    if mkflat != 'n':
        list_flat_b = []
        list_flat_r = []
        for dflats in files_dflat:
            if instruments.blue_or_red(dflats)[0] == 'blue':
                list_flat_b.append(dflats)
            elif instruments.blue_or_red(dflats)[0] == 'red':
                list_flat_r.append(dflats)
            else:
                sys.exit()

    # make pre_reduced if it doesn't exist
    if not os.path.isdir('pre_reduced/'):
        os.mkdir('pre_reduced/')

    # log the existing processed files (need to verify this works if pre_reduced is empty...)
    pfiles = []
    new_files = []
    for root, dirnames, filenames in os.walk('pre_reduced'):
        for file in filenames:
            if file.startswith('to'):
                pfiles.append(file)
    print(pfiles)

    # loop over each image in pre_reduced
    for img in listfile:
        hdr = util.readhdr(img)
        targ = util.readkey3(hdr, 'object')

        # if file is not not a processed file, run the overscan+trim code
        if 'to' + img not in pfiles:

            # if the file is a science file, grab the name for later
            if 'arc' not in targ.lower() and 'flat' not in targ.lower():
                new_files.append(img)
                print('Adding data for: ' + targ)

            inst = instruments.blue_or_red(img)[1]

            iraf.specred.dispaxi = inst.get('dispaxis')
            iraf.longslit.dispaxi = inst.get('dispaxis')

            _biassec0 = inst.get('biassec')
            _trimsec0 = inst.get('trimsec')

            ######################################################################
            #
            # JB: this chunk of code needs attention
            # It seems incredibly hacky for anything but Kast...
            #
            # overscan
            if not img.startswith('o') and inst.get('observatory') == 'lick':
                if os.path.isfile('pre_reduced/o' + img):
                    os.remove('pre_reduced/o' + img)
                util.kastbias(img, 'pre_reduced/o' + img)
            elif not img.startswith('o') and inst.get('observatory') != 'lick':
                if os.path.isfile('pre_reduced/o' + img):
                    os.remove('pre_reduced/o' + img)
                os.system('cp ' + img + ' ' + 'pre_reduced/' + img)

            # trim
            if not img.startswith('t') and inst.get('observatory') == 'lick':
                if os.path.isfile('pre_reduced/to' + img):
                    os.remove('pre_reduced/to' + img)
                iraf.ccdproc('pre_reduced/o' + img,
                             output='pre_reduced/to' + img,
                             overscan='no',
                             trim='yes',
                             zerocor="no",
                             flatcor="no",
                             readaxi='line',
                             trimsec=str(_trimsec0),
                             Stdout=1)

            elif not img.startswith('t') and inst.get('observatory') != 'lick':
                if os.path.isfile('pre_reduced/to' + img):
                    os.remove('pre_reduced/to' + img)
                iraf.ccdproc('pre_reduced/' + img,
                             output='pre_reduced/to' + img,
                             overscan='yes',
                             trim='yes',
                             zerocor="no",
                             flatcor="no",
                             readaxi='line',
                             trimsec=str(_trimsec0),
                             biassec=str(_biassec0),
                             Stdout=1)

    # combine the arcs
    if mkarc != 'n':

        # blue arcs
        if len(list_arc_b) > 0:
            if len(list_arc_b) == 1:
                arc_blue = list_arc_b[0]
                os.system('cp ' + 'pre_reduced/to' + arc_blue + ' ' +
                          'pre_reduced/ARC_blue.fits')
            else:
                arc_str = ''
                for arc in list_arc_b:
                    arc_str = arc_str + 'pre_reduced/to' + arc + ','
                if os.path.isfile('pre_reduced/ARC_blue.fits'):
                    os.remove('pre_reduced/ARC_blue.fits')
                iraf.imcombine(arc_str, output='pre_reduced/ARC_blue.fits')

        # red arcs
        if len(list_arc_r) > 0:
            if len(list_arc_r) == 1:
                arc_red = list_arc_r[0]
                os.system('cp ' + 'pre_reduced/to' + arc_red + ' ' +
                          'pre_reduced/ARC_red.fits')
            else:
                arc_str = ''
                for arc in list_arc_r:
                    arc_str = arc_str + 'pre_reduced/to' + arc + ','
                if os.path.isfile('pre_reduced/ARC_red.fits'):
                    os.remove('pre_reduced/ARC_red.fits')
                iraf.imcombine(arc_str, output='pre_reduced/ARC_red.fits')

    # combine the flats
    if mkflat != 'n':
        inter = 'yes'

        # blue flats
        if len(list_flat_b) > 0:
            br, inst = instruments.blue_or_red(list_flat_b[0])
            iraf.specred.dispaxi = inst.get('dispaxis')
            if len(list_flat_b) == 1:
                # Flat_blue = 'pre_reduced/to'+ list_flat_b[0]
                Flat_blue = list_flat_b[0]
            else:
                flat_str = ''
                for flat in list_flat_b:
                    flat_str = flat_str + 'pre_reduced/to' + flat + ','
                #subsets = 'no'
                if os.path.isfile('pre_reduced/toFlat_blue'):
                    os.remove('pre_reduced/toFlat_blue')
                iraf.flatcombine(flat_str,
                                 output='pre_reduced/toFlat_blue',
                                 ccdtype='',
                                 rdnoise=3.7,
                                 subsets='no',
                                 process='no')
                Flat_blue = 'Flat_blue.fits'

            #What is the output here? Check for overwrite
            iraf.specred.response('pre_reduced/to' + Flat_blue,
                                  normaliz='pre_reduced/to' + Flat_blue,
                                  response='pre_reduced/RESP_blue',
                                  interac=inter,
                                  thresho='INDEF',
                                  sample='*',
                                  naverage=2,
                                  function='legendre',
                                  low_rej=3,
                                  high_rej=3,
                                  order=60,
                                  niterat=20,
                                  grow=0,
                                  graphic='stdgraph')

        # red flats
        if len(list_flat_r) > 0:
            br, inst = instruments.blue_or_red(list_flat_r[0])
            iraf.specred.dispaxi = inst.get('dispaxis')
            if len(list_flat_r) == 1:
                # Flat_red = 'pre_reduced/to' + list_flat_r[0]
                Flat_red = list_flat_r[0]
            else:
                flat_str = ''
                for flat in list_flat_r:
                    flat_str = flat_str + 'pre_reduced/to' + flat + ','
                if os.path.isfile('pre_reduced/toFlat_red'):
                    os.remove('pre_reduced/toFlat_red')
                iraf.flatcombine(flat_str,
                                 output='pre_reduced/toFlat_red',
                                 ccdtype='',
                                 rdnoise=3.8,
                                 subsets='yes',
                                 process='no')
                Flat_red = 'Flat_red.fits'

            #What is the output here? Check for overwrite
            iraf.specred.response('pre_reduced/to' + Flat_red,
                                  normaliz='pre_reduced/to' + Flat_red,
                                  response='pre_reduced/RESP_red',
                                  interac=inter,
                                  thresho='INDEF',
                                  sample='*',
                                  naverage=2,
                                  function='legendre',
                                  low_rej=3,
                                  high_rej=3,
                                  order=80,
                                  niterat=20,
                                  grow=0,
                                  graphic='stdgraph')

    # science files should have 't' in front now
    # this just gets the base name, to prefix assumed below
    if new_files is not None:
        files_science = new_files

    # get all the science objects for the night
    science_targets = []
    for obj in files_science:
        hdr = util.readhdr(obj)
        _type = util.readkey3(hdr, 'object')
        science_targets.append(_type)

    # make a dir for each sci object
    science_targets = set(science_targets)
    for targ in science_targets:
        if not os.path.isdir('pre_reduced/' + targ + '/'):
            os.mkdir('pre_reduced/' + targ + '/')

    # copy the files into the obj dir
    for obj in files_science:
        hdr = util.readhdr(obj)
        targ = util.readkey3(hdr, 'object')
        if not obj.startswith('to'):
            os.system('cp ' + 'pre_reduced/to' + obj + ' ' + 'pre_reduced/' +
                      targ + '/')
        else:
            os.system('cp ' + 'pre_reduced/' + obj + ' ' + 'pre_reduced/' +
                      targ + '/')

    rawfiles = glob.glob('*.fits')
    ofiles = glob.glob('pre_reduced/o' + '*.fits')
    tfiles = glob.glob('pre_reduced/to' + '*.fits')

    # delete raw files from the pre_reduced dir
    # there shouldn't be any there though?
    # maybe if the overscan isn't implemented for that detector
    for img in rawfiles:
        util.delete('pre_reduced/' + img)

    # delete the ofiles from pre_reduced dir
    for img in ofiles:
        util.delete(img)
Example No. 34
def reduce(imglist,files_arc, _cosmic, _interactive_extraction,_arc):

    import string
    import os
    import re
    import sys
    os.environ["PYRAF_BETA_STATUS"] = "1"
    try:
        from astropy.io import fits as pyfits
    except:
        import pyfits
    import numpy as np
    import util
    import instruments
    import combine_sides as cs
    import cosmics
    from pyraf import iraf

    dv = util.dvex()
    scal = np.pi / 180.
    
    if not _interactive_extraction:
        _interactive = False
    else:
        _interactive = True

    if not _arc:
        _arc_identify = False
    else:
        _arc_identify = True

    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    iraf.twodspec(_doprint=0)
    iraf.longslit(_doprint=0)
    iraf.onedspec(_doprint=0)
    iraf.specred(_doprint=0)
    iraf.disp(inlist='1', reference='1')

    toforget = ['ccdproc', 'imcopy', 'specred.apall', 'longslit.identify', 'longslit.reidentify', 'specred.standard',
                'longslit.fitcoords', 'onedspec.wspectext']
    for t in toforget:
        iraf.unlearn(t)
    iraf.ccdred.verbose = 'no'
    iraf.specred.verbose = 'no'
    iraf.ccdproc.darkcor = 'no'
    iraf.ccdproc.fixpix = 'no'
    iraf.ccdproc.flatcor = 'no'
    iraf.ccdproc.zerocor = 'no'
    iraf.ccdproc.ccdtype = ''

    iraf.longslit.mode = 'h'
    iraf.specred.mode = 'h'
    iraf.noao.mode = 'h'
    iraf.ccdred.instrument = "ccddb$kpno/camera.dat"

    list_arc_b = []
    list_arc_r = []

    for arcs in files_arc:
        hdr = util.readhdr(arcs)
        if util.readkey3(hdr, 'VERSION') == 'kastb':
            list_arc_b.append(arcs)
        elif util.readkey3(hdr, 'VERSION') == 'kastr':
            list_arc_r.append(arcs)
        else:
            print util.readkey3(hdr, 'VERSION') + ' not in database'
            sys.exit()
    
    asci_files = []
    newlist = [[],[]]

    print '\n### images to reduce :',imglist
    #raise TypeError
    for img in imglist:
        if 'b' in img:
            newlist[0].append(img)
        elif 'r' in img:
            newlist[1].append(img)

    if len(newlist[1]) < 1:
        newlist = newlist[:-1]
    
    for imgs in newlist:
        hdr = util.readhdr(imgs[0])
        if util.readkey3(hdr, 'VERSION') == 'kastb':
            inst = instruments.kast_blue
        elif util.readkey3(hdr, 'VERSION') == 'kastr':
            inst = instruments.kast_red
        else:
            print util.readkey3(hdr, 'VERSION') + ' not in database'
            sys.exit()

        iraf.specred.dispaxi = inst.get('dispaxis')
        iraf.longslit.dispaxi = inst.get('dispaxis')

        _gain = inst.get('gain')
        _ron = inst.get('read_noise')
        iraf.specred.apall.readnoi = _ron
        iraf.specred.apall.gain = _gain

        _object0 = util.readkey3(hdr, 'OBJECT')
        _date0 = util.readkey3(hdr, 'DATE-OBS')


        _biassec0 = inst.get('biassec')
        _trimsec0 = inst.get('trimsec')

        _object0 = re.sub(' ', '', _object0)
        _object0 = re.sub('/', '_', _object0)
        nameout0 = str(_object0) + '_' + inst.get('name') + '_' + str(_date0)

        nameout0 = util.name_duplicate(imgs[0], nameout0, '')
        timg = nameout0
        print '\n### now processing :',timg,' for -> ',inst.get('name')
        if len(imgs) > 1:
            img_str = ''
            for i in imgs:
                img_str = img_str + i + ','
            iraf.imcombine(img_str, output=timg)
        else:
            img = imgs[0]
            if os.path.isfile(timg):
                os.system('rm -rf ' + timg)
            iraf.imcopy(img, output=timg)
        
        zero_file = inst.get('archive_zero_file')
        os.system('cp ' + zero_file + ' .')
        zero_file = string.split(zero_file, '/')[-1]
        
        flat_file = inst.get('archive_flat_file')
        os.system('cp ' + flat_file + ' .')
        flat_file = string.split(flat_file, '/')[-1]
        
        iraf.ccdproc(timg, output='', overscan='yes', trim='yes', zerocor="no", flatcor="no", readaxi='line',
                     trimsec=str(_trimsec0),biassec=str(_biassec0), Stdout=1)

        iraf.ccdproc(timg, output='', overscan='no', trim='no', zerocor="yes", flatcor="no", readaxi='line',
                     zero=zero_file,order=3, Stdout=1)
        iraf.ccdproc(timg, output='', overscan='no', trim='no', zerocor="no", flatcor="yes", readaxi='line',
                     flat=flat_file, Stdout=1)

        img = timg

        #raw_input("Press Enter to continue...")
        print '\n### starting cosmic removal'
        if _cosmic:
            array, header = cosmics.fromfits(img)
            c = cosmics.cosmicsimage(array, gain=inst.get('gain'), readnoise=inst.get('read_noise'), sigclip = 4.5, sigfrac = 0.5, objlim = 1.0)
            c.run(maxiter = 4)
            cosmics.tofits('cosmic_' + img, c.cleanarray, header)

        print '\n### cosmic removal finished'

        img='cosmic_' + img

        if inst.get('name') == 'kast_blue':
            arcfile = list_arc_b[0]
        elif inst.get('name') == 'kast_red':
            arcfile = list_arc_r[0]
        
        if not arcfile.endswith(".fits"):
            arcfile=arcfile+'.fits'

        if os.path.isfile(arcfile):
            util.delete('t' + arcfile)
            iraf.ccdproc(arcfile, output= 't' + arcfile, overscan='yes', trim='yes', zerocor="no", flatcor="no",
                         readaxi='line', trimsec=str(_trimsec0), biassec=str(_biassec0), Stdout=1)
            arcfile = 't' + arcfile
        else:
            print '\n### warning no arcfile \n exit '
            sys.exit()

        if not os.path.isdir('database/'):
                os.mkdir('database/')
        
        if _arc_identify:
            arc_ex=re.sub('.fits', '.ms.fits', arcfile)
            print '\n### arcfile : ',arcfile
            print '\n### arcfile extraction : ',arc_ex
            iraf.specred.apall(arcfile, output='', line = 'INDEF', nsum=10, interactive='no', extract='yes',find='yes', nfind=1 ,format='multispec', trace='no',back='no',recen='no')
            iraf.longslit.identify(images=arc_ex, section=inst.get('section'),coordli=inst.get('line_list'),function = 'spline3',order=3, mode='h')
        else:
            arcref = inst.get('archive_arc_extracted')
            arcrefid = inst.get('archive_arc_extracted_id')
            os.system('cp ' + arcref + ' .')
            arcref = string.split(arcref, '/')[-1]
            os.system('cp ' + arcrefid + ' ./database')

            arc_ex=re.sub('.fits', '.ms.fits', arcfile)

            print '\n###  arcfile : ',arcfile
            print '\n###  arcfile extraction : ',arc_ex
            print '\n###  arc referenece : ',arcref
            iraf.specred.apall(arcfile, output=arc_ex, line = 'INDEF', nsum=10, interactive='no', extract='yes',find='yes', nfind=1 ,format='multispec', trace='no',back='no',recen='no')

            iraf.longslit.reidentify(referenc=arcref, images=arc_ex, interac='NO', section=inst.get('section'), 
                                    coordli=inst.get('line_list'), shift='INDEF', search='INDEF',
                                    mode='h', verbose='YES', step=0,nsum=5, nlost=2, cradius=10, refit='yes',overrid='yes',newaps='no')
        
        #print '\n### checking sky lines '
        #_skyfile = inst.get('sky_file')
        #shift = util.skyfrom2d(img, _skyfile,'True')
        #print '\n### I found a shift of : ',shift

        print '\n### extraction using apall'
        result = []
        hdr_image = util.readhdr(img)
        _type=util.readkey3(hdr_image, 'object')

        if _type.startswith(("arc", "dflat", "Dflat", "Dbias", "Bias")):
            print '\n### warning problem \n exit '
            sys.exit()
        else:
            imgex = util.extractspectrum(
                img, dv, inst, _interactive, 'obj')
            print '\n### applying wavelength solution'
            iraf.disp(inlist=imgex, reference=arc_ex)   
            sensfile = inst.get('archive_sens')
            os.system('cp ' + sensfile + ' .')
            sensfile = string.split(sensfile, '/')[-1]
            if sensfile:
                print '\n### sensitivity function : ',sensfile
                imgf = re.sub('.fits', '_f.fits', img)
                _extinction = inst.get('extinction_file')
                _observatory = inst.get('observatory')
                _exptime = util.readkey3(hdr, 'EXPTIME')
                _airmass = util.readkey3(hdr, 'AIRMASS')
                util.delete(imgf)
                dimgex='d'+imgex
                iraf.specred.calibrate(input=dimgex, output=imgf, sensiti=sensfile, extinct='yes',
                                        extinction=_extinction,flux='yes', ignorea='yes', airmass=_airmass, exptime=_exptime,
                                        fnu='no')
                imgout = imgf
                imgasci = re.sub('.fits', '.asci', imgout)
                errasci = re.sub('.fits', '_err.asci', imgout)
                util.delete(imgasci)
                iraf.onedspec.wspectext(imgout + '[*,1,1]', imgasci, header='no')
                iraf.onedspec.wspectext(imgout + '[*,1,4]', errasci, header='no')
                spec = np.transpose(np.genfromtxt(imgasci))
                err = np.transpose(np.genfromtxt(errasci))
                util.delete(errasci)
                final = np.transpose([spec[0], spec[1], err[1]])
                np.savetxt(imgasci, final)

                result = result + [imgout, imgasci]

        result = result + [imgex] + [timg]
       
        asci_files.append(imgasci)
        if not os.path.isdir(_object0 + '/'):
            os.mkdir(_object0 + '/')
        for img in result:
            os.system('mv ' + img + ' ' + _object0 + '/')
        
        if not _arc_identify:
            util.delete(arcref)
        util.delete(sensfile)
        util.delete(zero_file)
        util.delete(flat_file)
        util.delete(arc_ex)
        util.delete(arcfile)
        util.delete('logfile')
        util.delete(dimgex)
        util.delete('cosmic_*')
    print '\n### now I will merge ...'
    if len(asci_files) > 1:
        final = cs.combine_blue_red(asci_files[0], asci_files[1], _object0)
    print '\n### final result in folder ',_object0,' is ',_object0+'_merged.asci'
    return result
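# A minimal stand-alone sketch (not part of the function above): reading back
# the three-column ASCII product (wavelength, flux, flux error) written with
# np.savetxt at the end of the pipeline. The file name below is hypothetical.
import numpy as np
wave, flux, fluxerr = np.genfromtxt('myobject_kast_blue_2020-01-01.asci', unpack=True)
print '### wavelength range:', wave[0], '-', wave[-1], 'Angstrom,', len(wave), 'pixels'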
Exemplo n.º 35
0
def combineARCSpectra(args):

    import sys
    import glob
    import os
    from pyraf import iraf
    from shutil import copyfile
    from operator import itemgetter
    import pandas as pd

    # Setup relevant IRAF tasks...
    iraf.images()
    iraf.imutil()
    iraf.immatch()
    iraf.onedspec()

    # Grab list of file names from argument list
    #  and convert it into a python list...
    #fnameList = 'WDC0442-0536.0086b.ms.fits,WDC0442-0536.0087b.ms.fits,WDC0442-0536.0088b.ms.fits'
    fnameList = args.fileList
    fnameList = fnameList.split(',')

    # Clean up file name list to exclude any excess blank spaces...
    fileNameList = []
    for fname in fnameList:
        fileName = fname.strip()
        fileNameList.append(fileName)

    # Start a new file name list, for the imsliced
    #  files to be imcombined...
    newFileNameList = []

    # Loop through the input file name list...
    for fileName in fileNameList:

        print fileName

        # Create an output file name...
        baseFileName = os.path.splitext(fileName)[0] + '.tmp'
        outputFileName = baseFileName + '.fits'

        # Delete any temporary files called tempA00?.fits,
        #  and then run imslice on fileName...
        iraf.imdel('tempA00?.fits')
        iraf.imslice(fileName, 'tempA', 2)

        # Delete any temporary files called tempB00?.fits,
        #  and then run imslice on tempA001.fits (an output
        #  from the first imslice)...
        iraf.imdel('tempB00?.fits')
        iraf.imslice('tempA001.fits', 'tempB', 2)

        # Rename the output file tempB001.fits from the second
        #  imslice procedure; tempB001.fits contains the 1D
        #  version of the target spectrum. (The other file,
        #  tempB002.fits, which we ignore, contains the 1D
        #  version of the sky background spectrum.)
        os.rename('tempB001.fits', outputFileName)

        # Append the output file name to the new file name list...
        newFileNameList.append(outputFileName)

        # Clean up unneeded temporary files...
        iraf.imdel('tempA00?.fits')
        iraf.imdel('tempB00?.fits')

    # Print the list of file names that will be fed to imcombine...
    print newFileNameList

    # Here, we convert the python list into an IRAF list...
    inlist = ','.join(newFileNameList)
    print inlist

    # Run imcombine to create a median-combined spectrum
    #  and its sigma image...
    medianFile = newFileNameList[0].split('.')[0] + '.median.b.ms.fits'
    sigmaFile = 'sigma-' + newFileNameList[0].split(
        '.')[0] + '.median.b.ms.fits'
    iraf.imdel(medianFile)
    iraf.imdel(sigmaFile)
    iraf.imcombine(inlist,
                   medianFile,
                   combine='median',
                   scale='median',
                   sigma=sigmaFile)

    # Create ASCII text file equivalents...
    medianFileTxt = newFileNameList[0].split('.')[0] + '.median.b.ms.txt'
    sigmaFileTxt = 'sigma-' + newFileNameList[0].split(
        '.')[0] + '.median.b.ms.txt'
    iraf.onedspec.wspectext(medianFile, medianFileTxt)
    iraf.onedspec.wspectext(sigmaFile, sigmaFileTxt)

    # Combine median and sigma text files...
    df1 = pd.read_csv(medianFileTxt,
                      header=None,
                      names=['wave', 'flux'],
                      delim_whitespace=True)
    df2 = pd.read_csv(sigmaFileTxt,
                      header=None,
                      names=['wave', 'flux_err'],
                      delim_whitespace=True)
    df12 = df1.merge(df2, on='wave')
    outputFile = newFileNameList[0].split('.')[0] + '.median.flm'
    df12.to_csv(outputFile, index=False, sep=' ')

    # Run imcombine to create a mean-combined spectrum
    #  and its sigma image...
    meanFile = newFileNameList[0].split('.')[0] + '.mean.b.ms.fits'
    sigmaFile = 'sigma-' + newFileNameList[0].split('.')[0] + '.mean.b.ms.fits'
    iraf.imdel(meanFile)
    iraf.imdel(sigmaFile)
    iraf.imcombine(inlist,
                   meanFile,
                   combine='average',
                   scale='median',
                   sigma=sigmaFile)

    # Create ASCII text file equivalents...
    meanFileTxt = newFileNameList[0].split('.')[0] + '.mean.b.ms.txt'
    sigmaFileTxt = 'sigma-' + newFileNameList[0].split(
        '.')[0] + '.mean.b.ms.txt'
    iraf.onedspec.wspectext(meanFile, meanFileTxt)
    iraf.onedspec.wspectext(sigmaFile, sigmaFileTxt)

    # Combine mean and sigma text files...
    df1 = pd.read_csv(meanFileTxt,
                      header=None,
                      names=['wave', 'flux'],
                      delim_whitespace=True)
    df2 = pd.read_csv(sigmaFileTxt,
                      header=None,
                      names=['wave', 'flux_err'],
                      delim_whitespace=True)
    df12 = df1.merge(df2, on='wave')
    outputFile = newFileNameList[0].split('.')[0] + '.mean.flm'
    df12.to_csv(outputFile, index=False, sep=' ')

    return 0
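# Hypothetical usage sketch: combineARCSpectra only needs an object exposing a
# comma-separated 'fileList' attribute, e.g. an argparse namespace. The file
# names are illustrative (taken from the comment in the function above).
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('fileList', help='comma-separated list of .ms.fits spectra')
args = parser.parse_args(['WDC0442-0536.0086b.ms.fits,WDC0442-0536.0087b.ms.fits,WDC0442-0536.0088b.ms.fits'])
combineARCSpectra(args)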
Exemplo n.º 36
0
def load_modules():
    # Define a function to load all of the modules so that they don't import
    # unless we need them
    global iraf
    from pyraf import iraf
    iraf.pysalt()
    iraf.saltspec()
    iraf.saltred()
    iraf.set(clobber='YES')
    
    global sys
    import sys

    global os
    import os

    global shutil
    import shutil

    global glob
    from glob import glob
    
    global pyfits
    import pyfits

    global np
    import numpy as np
    
    global lacosmicx
    import lacosmicx
    
    global interp
    from scipy import interp
    
    global signal
    from scipy import signal
    
    global ndimage
    from scipy import ndimage
    
    global interpolate
    from scipy import interpolate
    
    global WCS
    from astropy.wcs import WCS
    
    global optimize
    from scipy import optimize
    
    global ds9
    import pyds9 as ds9
    
    global GaussianProcess
    from sklearn.gaussian_process import GaussianProcess
    
    global pandas
    import pandas
    
    iraf.onedspec()
    iraf.twodspec()
    iraf.longslit()
    iraf.apextract()
    iraf.imutil()
    iraf.rvsao(motd='no')
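# Usage sketch (assumption): the lazy-import pattern above means a script only
# pays the PyRAF/PySALT start-up cost when a reduction is actually run; after
# the call, the packages and modules are bound as module-level globals.
load_modules()
print iraf.onedspec    # e.g. the onedspec package object is now available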
Exemplo n.º 37
0
    lamplist = [os.path.basename(x) for x in glob.glob(
        dbpath + '/lamp_XL*fits')]
    linelists = 'linelists$fear.dat'
elif teles == "LJT":
    print("Settings for LJT will be used.")
    lamplist = [os.path.basename(x) for x in glob.glob(
        dbpath + '/lamp_LJ*fits')]
    linelists = 'linelists$henear.dat'
else:
    print("Error detected.")

print('Possible lamp spectrum(a) for references:\n' +
      ", ".join(p for p in lamplist))
refspec = str(
    raw_input("Enter filename of the lamp spectrum you want to use: "))
# str.strip('.fits') removes characters, not the suffix, so handle the
# extension explicitly
refspec_base = refspec[:-5] if refspec.endswith('.fits') else refspec
refspec1 = refspec_base + '.fits'
copy2(dbpath + '/' + refspec1, CWD)
copy2(dbpath + '/id' + refspec_base, CWD + '/database')
iraf.onedspec()
iraf.onedspec.reidentify.unlearn()
# iraf.onedspec.reidentify.fwidth = 10
iraf.onedspec.reidentify.coordli = linelists
iraf.onedspec.reidentify(reference=refspec1,
                         images='af*fits')

iraf.onedspec.identify.unlearn()
iraf.onedspec.identify.fwidth = 10
iraf.onedspec.identify.coordli = linelists
iraf.onedspec.identify(images='af*fits')
print('---DONE---')
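# A possible follow-up step (assumption, not shown in the original snippet):
# once every af*fits frame has an id* solution in database/, assign reference
# spectra and apply the dispersion correction with the standard onedspec tasks
# refspectra and dispcor. Parameter values here are only illustrative.
import glob
iraf.onedspec.refspectra(input='af*fits', references='af*fits',
                         sort='', group='', confirm='no')
for f in glob.glob('af*fits'):
    iraf.onedspec.dispcor(input=f, output='d' + f)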
Exemplo n.º 38
0
# <codecell>

flat

# <codecell>

#create flats with different aperture widths
iraf.apall(input=flat, output='flat_1', referen=flat, format='multispec', interac='no', find='no', recenter='no', resize='yes', edit='no', trace='no', fittrac='no', extract='yes', extras='no', review='no', line=2000, lower=-1, upper=1, bkg='no', nsum=-10, ylevel="INDEF", llimit=-1, ulimit=1,Stdout="/dev/null")
# iraf.apall(input='tmp/masterflat.fits', output='tmp/flat_2', referen='tmp/masterflat.fits', format='multispec', interac='no', find='no', recenter='no', resize='yes', edit='no', trace='no', fittrac='no', extract='yes', extras='no', review='no', line=2000, lower=-2, upper=2, bkg='no', nsum=-10, ylevel="INDEF", llimit=-2, ulimit=2,Stdout="/dev/null")
# iraf.apall(input='tmp/masterflat.fits', output='tmp/flat_3', referen='tmp/masterflat.fits', format='multispec', interac='no', find='no', recenter='no', resize='yes', edit='no', trace='no', fittrac='no', extract='yes', extras='no', review='no', line=2000, lower=-3, upper=3, bkg='no', nsum=-10, ylevel="INDEF", llimit=-3, ulimit=3,Stdout="/dev/null")

# <codecell>

#normalize different flats
iraf.noao(_doprint=0,Stdout="/dev/null")
iraf.onedspec(_doprint=0,Stdout="/dev/null")
iraf.continuum(input=flat, output='flat_1_norm', lines='*', bands='*', type='ratio', wavescale='no', interac='no', sample='1:4095', functio='spline3', order=13, low_rej=2, high_rej=2, niter=10,Stdout="/dev/null")
# iraf.continuum(input='tmp/flat_2', output='tmp/flat_2_norm', lines='*', bands='*', type='ratio', wavescale='no', interac='no', sample='1:4095', functio='spline3', order=13, low_rej=2, high_rej=2, niter=10,Stdout="/dev/null")
# iraf.continuum(input='tmp/flat_3', output='tmp/flat_3_norm', lines='*', bands='*', type='ratio', wavescale='no', interac='no', sample='1:4095', functio='spline3', order=13, low_rej=2, high_rej=2, niter=10,Stdout="/dev/null")

# <codecell>

#combine normalized flats
# iraf.images(_doprint=0,Stdout="/dev/null")
# iraf.imutil(_doprint=0,Stdout="/dev/null")
# iraf.imarith(operand1='tmp/flat_1_norm', op='*', operand2=0.43, result='tmp/flat_1_norm2',Stdout="/dev/null")
# iraf.imarith(operand1='tmp/flat_2_norm', op='*', operand2=0.41, result='tmp/flat_2_norm2',Stdout="/dev/null")
# iraf.imarith(operand1='tmp/flat_3_norm', op='*', operand2=0.16, result='tmp/flat_3_norm2',Stdout="/dev/null")

# <codecell>
Exemplo n.º 39
0
os.chdir(folderroot)
import numpy as np
from astropy.io import fits
from matplotlib import pyplot as plt
import shutil

import sys
from pyraf import iraf
iraf.noao(_doprint=0)
iraf.imred(_doprint=0)
iraf.ccdred(_doprint=0)
iraf.twodspec(_doprint=0)
iraf.longslit(_doprint=0)
iraf.kpnoslit(_doprint=0)
iraf.astutil(_doprint=0)
iraf.onedspec(_doprint=0)
iraf.twodspec.longslit.dispaxis = 2

#read object keywords
for file in os.listdir(os.getcwd()):
    if file.endswith('.fits'):
        testfile = file


hduo = fits.open(testfile)

#name targets (science & standard)
target = hduo[0].header['OBJECT']
#target2 = 'SP0644p375'
#std = 'SP0305+261'
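# Sketch (assumption): the same header access can be used to group every frame
# in the directory by its OBJECT keyword before picking science and standard.
frames = {}
for f in os.listdir(os.getcwd()):
    if f.endswith('.fits'):
        frames.setdefault(fits.getheader(f)['OBJECT'], []).append(f)
print frames.keys()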
Exemplo n.º 40
0
def efoscfastredu(imglist, _listsens, _listarc, _ext_trace, _dispersionline, _cosmic, _interactive):
    # print "LOGX:: Entering `efoscfastredu` method/function in %(__file__)s"
    # % globals()
    import string
    import os
    import re
    import sys
    os.environ["PYRAF_BETA_STATUS"] = "1"
    try:
        from astropy.io import fits as pyfits
    except:
        import pyfits
    from ntt.util import readhdr, readkey3
    import ntt
    import numpy as np
    dv = ntt.dvex()
    scal = np.pi / 180.
    if not _interactive:
        _interactive = False
        _inter = 'NO'
    else:
        _inter = 'YES'
    from pyraf import iraf

    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    iraf.twodspec(_doprint=0)
    iraf.longslit(_doprint=0)
    iraf.onedspec(_doprint=0)
    iraf.specred(_doprint=0)
    toforget = ['ccdproc', 'imcopy', 'specred.apall', 'longslit.identify', 'longslit.reidentify', 'specred.standard',
                'longslit.fitcoords', 'onedspec.wspectext']
    for t in toforget:
        iraf.unlearn(t)
    iraf.ccdred.verbose = 'no'  # not print steps
    iraf.specred.verbose = 'no'  # not print steps
    iraf.ccdproc.darkcor = 'no'
    iraf.ccdproc.fixpix = 'no'
    iraf.ccdproc.flatcor = 'no'
    iraf.ccdproc.zerocor = 'no'
    iraf.ccdproc.ccdtype = ''
    _gain = ntt.util.readkey3(ntt.util.readhdr(imglist[0]), 'gain')
    _ron = ntt.util.readkey3(ntt.util.readhdr(imglist[0]), 'ron')
    iraf.specred.apall.readnoi = _ron
    iraf.specred.apall.gain = _gain
    iraf.specred.dispaxi = 2
    iraf.longslit.dispaxi = 2
    iraf.longslit.mode = 'h'
    iraf.specred.mode = 'h'
    iraf.noao.mode = 'h'
    iraf.ccdred.instrument = "ccddb$kpno/camera.dat"
    iraf.set(direc=ntt.__path__[0] + '/')
    for img in imglist:
        hdr = ntt.util.readhdr(img)
        _tech = ntt.util.readkey3(hdr, 'tech')
        if _tech != 'SPECTRUM':
            sys.exit('error: ' + str(img) + ' is not a spectrum ')
        print '\n####  image name = ' + img + '\n'
        _grism0 = readkey3(hdr, 'grism')
        _filter0 = readkey3(hdr, 'filter')
        _slit0 = readkey3(hdr, 'slit')
        _object0 = readkey3(hdr, 'object')
        _date0 = readkey3(hdr, 'date-night')
        setup = (_grism0, _filter0, _slit0)
        _biassec0 = '[3:1010,1026:1029]'
        if _grism0 == 'Gr16':
            _trimsec0 = '[100:950,1:950]'
        elif _grism0 == 'Gr13':
            if _filter0 == 'Free':
                _trimsec0 = '[100:950,1:1015]'
            elif _filter0 == 'GG495':
                _trimsec0 = '[100:950,208:1015]'
            elif _filter0 == 'OG530':
                _trimsec0 = '[100:950,300:1015]'
        elif _grism0 == 'Gr11':
            _trimsec0 = '[100:950,5:1015]'
        else:
            _trimsec0 = '[100:950,5:1015]'
        _object0 = re.sub(' ', '', _object0)
        _object0 = re.sub('/', '_', _object0)
        nameout0 = 't' + str(_object0) + '_' + str(_date0)
        for _set in setup:
            nameout0 = nameout0 + '_' + _set
        nameout0 = ntt.util.name_duplicate(img, nameout0, '')
        timg = nameout0
        if os.path.isfile(timg):
            os.system('rm -rf ' + timg)
        iraf.imcopy(img, output=timg)
        iraf.ccdproc(timg, output='', overscan='no', trim='yes', zerocor="no", flatcor="no", readaxi='column',
                     trimsec=str(_trimsec0), biassec=_biassec0, Stdout=1)
        img = timg
        if _listarc:
            arcfile = ntt.util.searcharc(img, _listarc)[0]
        else:
            arcfile = ''
        if not arcfile:
            arcfile = ntt.util.searcharc(img, '')[0]
        else:
            iraf.ccdproc(arcfile, output='t' + arcfile, overscan='no', trim='yes', zerocor="no", flatcor="no",
                         readaxi='column', trimsec=str(_trimsec0), biassec=str(_biassec0), Stdout=1)
            arcfile = 't' + arcfile

        if _cosmic:
            # print cosmic rays rejection
            ntt.cosmics.lacos(img, output='', gain=_gain, readn=_ron, xorder=9, yorder=9, sigclip=4.5, sigfrac=0.5,
                              objlim=1, verbose=True, interactive=False)
            print '\n### cosmic rays rejections ........ done '

        if not arcfile:
            print '\n### warning no arcfile \n exit '
        else:
            arcref = ntt.util.searcharc(img, '')[0]
            if arcfile[0] == '/':
                os.system('cp ' + arcfile + ' ' +
                          string.split(arcfile, '/')[-1])
                arcfile = string.split(arcfile, '/')[-1]
            arcref = string.split(arcref, '/')[-1]
            if arcref:
                os.system('cp ' + arcref + ' .')
                arcref = string.split(arcref, '/')[-1]
                if not os.path.isdir('database/'):
                    os.mkdir('database/')
                if os.path.isfile(ntt.util.searcharc(img, '')[1] + '/database/id' + re.sub('.fits', '', arcref)):
                    os.system('cp ' + ntt.util.searcharc(img, '')[1] + '/database/id' + re.sub('.fits', '',
                                                                                               arcref) + ' database/')
                iraf.longslit.reidentify(referenc=arcref, images=arcfile, interac=_inter, section='column 10',
                                         coordli='direc$standard/ident/Lines_HgCdHeNeAr600.dat', overrid='yes', step=0,
                                         newaps='no', nsum=5, nlost=2, mode='h', verbose='no')
            else:
                iraf.longslit.identify(images=arcfile, section='column 10',
                                       coordli='direc$standard/ident/Lines_HgCdHeNeAr600.dat', nsum=10, fwidth=7,
                                       order=3, mode='h')
            iraf.longslit.reident(referenc=arcfile, images=arcfile, interac='NO', section='column 10',
                                  coordli='direc$standard/ident/Lines_HgCdHeNeAr600.dat', overrid='yes', step=10,
                                  newaps='yes', nsum=5, nlost=2, mode='h', verbose='no')
            qqq = iraf.longslit.fitcoords(images=re.sub('.fits', '', arcfile), fitname=re.sub('.fits', '', arcfile),
                                          interac='no', combine='yes', databas='database',
                                          function='legendre', yorder=4, logfile='logfile', plotfil='', mode='h')
            iraf.specred.transform(input=img, output=img, minput='', fitnames=re.sub('.fits', '', arcfile),
                                   databas='database',
                                   x1='INDEF', x2='INDEF', y1='INDEF', y2='INDEF', flux='yes', mode='h',
                                   logfile='logfile')
            # ######################  check wavelength calibration ############
            _skyfile = ntt.__path__[
                0] + '/standard/ident/sky_' + setup[0] + '_' + setup[1] + '.fits'
            shift = ntt.efoscspec2Ddef.skyfrom2d(img, _skyfile)
            print '\n###     check in wavelength performed ...... spectrum shifted by  ' + str(shift) + ' Angstrom \n'
            zro = pyfits.open(img)[0].header.get('CRVAL2')
            ntt.util.updateheader(img, 0, {'CRVAL2': [zro + int(shift), '']})
            std, rastd, decstd, magstd = ntt.util.readstandard(
                'standard_efosc_mab.txt')
            hdrt = readhdr(img)
            _ra = readkey3(hdrt, 'RA')
            _dec = readkey3(hdrt, 'DEC')
            _object = readkey3(hdrt, 'object')
            dd = np.arccos(np.sin(_dec * scal) * np.sin(decstd * scal) + np.cos(_dec * scal) *
                           np.cos(decstd * scal) * np.cos((_ra - rastd) * scal)) * ((180 / np.pi) * 3600)
            if min(dd) < 100:
                _type = 'stdsens'
                ntt.util.updateheader(
                    img, 0, {'stdname': [std[np.argmin(dd)], '']})
                ntt.util.updateheader(
                    img, 0, {'magstd': [float(magstd[np.argmin(dd)]), '']})
            else:
                _type = 'obj'
            print '\n###      EXTRACTION USING IRAF TASK APALL \n'
            result = []
            if _type == 'obj':
                imgex = ntt.util.extractspectrum(
                    img, dv, _ext_trace, _dispersionline, _interactive, _type)
                ntt.util.updateheader(
                    imgex, 0, {'FILETYPE': [22107, 'extracted 1D spectrum ']})
                ntt.util.updateheader(imgex, 0, {
                    'PRODCATG': ['SCIENCE.' + readkey3(readhdr(imgex), 'tech').upper(), 'Data product category']})
                ntt.util.updateheader(imgex, 0, {'TRACE1': [img, '']})
                result.append(imgex)
                if _listsens:
                    sensfile = ntt.util.searchsens(img, _listsens)[0]
                else:
                    sensfile = ''
                if not sensfile:
                    sensfile = ntt.util.searchsens(img, '')[0]
                if sensfile:
                    imgf = re.sub('.fits', '_f.fits', img)
                    _extinctdir = 'direc$standard/extinction/'
                    _extinction = 'extinction_lasilla.dat'
                    _observatory = 'lasilla'
                    _exptime = readkey3(hdrt, 'exptime')
                    _airmass = readkey3(hdrt, 'airmass')
                    ntt.util.delete(imgf)
                    iraf.specred.calibrate(input=imgex, output=imgf, sensiti=sensfile, extinct='yes',
                                           flux='yes', ignorea='yes', extinction=_extinctdir + _extinction,
                                           observatory=_observatory, airmass=_airmass, exptime=_exptime,
                                           fnu='no')
                    hedvec = {'SENSFUN': [string.split(sensfile, '/')[-1], 'sensitivity function'],
                              'FILETYPE': [22208, '1D wavelength and flux calibrated spectrum '],
                              'SNR': [ntt.util.StoN2(imgf, False), 'Average S/N ratio'],
                              'BUNIT': ['erg/cm2/s/Angstrom', 'Flux Calibration Units'], 'TRACE1': [imgex, '']}
                    ntt.util.updateheader(imgf, 0, hedvec)
                    imgout = imgf
                    imgd = ntt.efoscspec1Ddef.fluxcalib2d(img, sensfile)
                    ntt.util.updateheader(
                        imgd, 0, {'FILETYPE': [22209, '2D wavelength and flux calibrated spectrum ']})
                    ntt.util.updateheader(imgd, 0, {'TRACE1': [img, '']})
                    imgasci = re.sub('.fits', '.asci', imgout)
                    ntt.util.delete(imgasci)
                    iraf.onedspec.wspectext(
                        imgout + '[*,1,1]', imgasci, header='no')
                    result = result + [imgout, imgd, imgasci]
            else:
                imgex = ntt.util.extractspectrum(
                    img, dv, _ext_trace, _dispersionline, _interactive, 'std')
                imgout = ntt.efoscspec1Ddef.sensfunction(
                    imgex, 'spline3', 6, _inter)
                result = result + [imgout]

    for img in result:
        if img[-5:] == '.fits':
            ntt.util.phase3header(img)  # phase 3 definitions
            ntt.util.airmass(img)  # phase 3 definitions
            ntt.util.updateheader(
                img, 0, {'quality': ['Rapid', 'Final or Rapid reduction']})
    return result
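# Hypothetical call sketch: reduce a single EFOSC spectrum non-interactively,
# with cosmic-ray cleaning, letting the task search the archive for the arc
# and the sensitivity function. The file name is illustrative.
out = efoscfastredu(['EFOSC_spec_SN2017abc.fits'], _listsens='', _listarc='',
                    _ext_trace=False, _dispersionline=False, _cosmic=True,
                    _interactive=False)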
Exemplo n.º 41
0
def load_modules():
    # Define a function to load all of the modules so that they don't import
    # unless we need them
    global iraf
    from pyraf import iraf
    iraf.pysalt()
    iraf.saltspec()
    iraf.saltred()
    iraf.set(clobber='YES')
    
    global sys
    import sys

    global os
    import os

    global shutil
    import shutil

    global glob
    from glob import glob
    
    global pyfits
    import pyfits

    global np
    import numpy as np
    
    global lacosmicx
    import lacosmicx
    
    global interp
    from scipy import interp
    
    global signal
    from scipy import signal
    
    global ndimage
    from scipy import ndimage
    
    global interpolate
    from scipy import interpolate
    
    global WCS
    from astropy.wcs import WCS
    
    global optimize
    from scipy import optimize
    
    global ds9
    import ds9
    
    global GaussianProcess
    from sklearn.gaussian_process import GaussianProcess
    
    global pandas
    import pandas
    
    iraf.onedspec()
    iraf.twodspec()
    iraf.longslit()
    iraf.apextract()
    iraf.imutil()
Exemplo n.º 42
0
    print 'Trying /home/lemoen/'
    os.chdir('/home/lemoen/')
except:
    print 'We must be on corvus then... trying /home/ezietsman/'
    os.chdir('/home/ezietsman/')
    
from pyraf import iraf
os.chdir(cd)
try:
    os.remove('splot.log')
except:
    pass
# load NOAO package
iraf.noao()
iraf.astutil()
iraf.onedspec()


times = []
results = []
fwhm = []

for i in range(61,395):
    print i
    
    # open spectrum and calculate continuum level near Ha line then write to cursor file
    data = pf.getdata('fec2117_%04d.fits'%i)
    head = pf.getheader('fec2117_%04d.fits'%i)
    start = head['CRVAL1']
    step = head['CDELT1']
    length = head['NAXIS1']
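    # Sketch: rebuild the linear wavelength axis of this spectrum from the
    # keywords just read (CRVAL1 = first-pixel wavelength, CDELT1 = step,
    # NAXIS1 = length). Assumes numpy is imported as np.
    wave = start + step * np.arange(length)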
Exemplo n.º 43
0
"""

import os
import shutil

import numpy as np
import pyfits as pf
from pyraf import iraf, iraffunctions
import matplotlib.pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages

from config import *
from run_ppxf import wavelength_array

iraf.onedspec(_doprint=0, Stdout=1)

def select_specs(specs):
    specs = np.array(specs, dtype=str)
    exptimes = np.zeros(len(specs))
    for i, spec in enumerate(specs):
        exptimes[i] = pf.getval(spec, "exptime")
    index = exptimes > 100.
    return specs[index].tolist()

if __name__ == "__main__":
    wdir = os.path.join(home, "data/reduced")
    outroot = wdir.replace("reduced", "combined")
    outroot2 = wdir.replace("reduced", "single")
    if not os.path.exists(outroot):
        os.mkdir(outroot)
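    # Usage sketch (continuation assumption): keep only exposures longer than
    # 100 s before any combination step. 'wdir' is the reduced-data directory
    # defined above; the glob pattern is illustrative.
    import glob
    specs = sorted(glob.glob(os.path.join(wdir, "*.fits")))
    long_exposures = select_specs(specs)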
Exemplo n.º 44
0
def telluric_atmo(imgstd):
    # print "LOGX:: Entering `telluric_atmo` method/function in %(__file__)s"
    # % globals()
    import numpy as np
    import ntt
    from pyraf import iraf

    try:
        import pyfits
    except:
        from astropy.io import fits as pyfits

    iraf.images(_doprint=0)
    iraf.noao(_doprint=0)
    iraf.twodspec(_doprint=0)
    iraf.longslit(_doprint=0)
    iraf.onedspec(_doprint=0)
    toforget = ['imfilter.gauss', 'specred.apall', 'longslit.identify', 'longslit.reidentify', 'specred.standard',
                'onedspec.wspectext']
    for t in toforget:
        iraf.unlearn(t)

    _grism = ntt.util.readkey3(ntt.util.readhdr(imgstd), 'grism')
    imgout = 'invers_atmo_' + imgstd
    ntt.util.delete(imgout)
    iraf.set(direc=ntt.__path__[0] + '/')
    _cursor = 'direc$standard/ident/cursor_sky_0'
    iraf.noao.onedspec.bplot(imgstd, cursor=_cursor,
                             spec2=imgstd, new_ima=imgout, overwri='yes')
    xxstd, ffstd = ntt.util.readspectrum(imgout)
    if _grism in ['Gr13', 'Gr16']:
        llo2 = np.compress((np.array(xxstd) >= 7550) & (
            np.array(xxstd) <= 7750), np.array(xxstd))
        llh2o = np.compress((np.array(xxstd) >= 7100) & (
            np.array(xxstd) <= 7500), np.array(xxstd))
        ffo2 = np.compress((np.array(xxstd) >= 7550) & (
            np.array(xxstd) <= 7750), np.array(ffstd))
        ffh2o = np.compress((np.array(xxstd) >= 7100) & (
            np.array(xxstd) <= 7500), np.array(ffstd))
    elif _grism in ['Gr11']:
        llo2 = np.compress((np.array(xxstd) >= 6830) & (
            np.array(xxstd) <= 7100), np.array(xxstd))
        llh2o = np.compress((np.array(xxstd) >= 7100) & (
            np.array(xxstd) <= 7500), np.array(xxstd))
        ffo2 = np.compress((np.array(xxstd) >= 6830) & (
            np.array(xxstd) <= 7100), np.array(ffstd))
        ffh2o = np.compress((np.array(xxstd) >= 7100) & (
            np.array(xxstd) <= 7500), np.array(ffstd))
    if _grism in ['Gr13', 'Gr16', 'Gr11']:
        _skyfileh2o = 'direc$standard/ident/ATLAS_H2O.fits'
        _skyfileo2 = 'direc$standard/ident/ATLAS_O2.fits'
        atlas_smooto2 = '_atlas_smoot_o2.fits'
        atlas_smooth2o = '_atlas_smoot_h2o.fits'
        _sigma = 200
        ntt.util.delete(atlas_smooto2)
        ntt.util.delete(atlas_smooth2o)
        iraf.imfilter.gauss(_skyfileh2o, output=atlas_smooth2o, sigma=_sigma)
        iraf.imfilter.gauss(_skyfileo2, output=atlas_smooto2, sigma=_sigma)
        llskyh2o, ffskyh2o = ntt.util.readspectrum(atlas_smooth2o)
        llskyo2, ffskyo2 = ntt.util.readspectrum(atlas_smooto2)
        ffskyo2cut = np.interp(llo2, llskyo2, ffskyo2)
        ffskyh2ocut = np.interp(llh2o, llskyh2o, ffskyh2o)
        _scaleh2o = []
        integral_h2o = []
        for i in range(1, 21):
            j = 0.6 + i * 0.04
            _ffskyh2ocut = list((np.array(ffskyh2ocut) * j) + 1 - j)
            diff_h2o = abs(_ffskyh2ocut - ffh2o)
            integraleh2o = np.trapz(diff_h2o, llh2o)
            integral_h2o.append(integraleh2o)
            _scaleh2o.append(j)
        _scaleo2 = []
        integral_o2 = []
        for i in range(1, 21):
            j = 0.6 + i * 0.04
            _ffskyo2cut = list((np.array(ffskyo2cut) * j) + 1 - j)
            diff_o2 = abs(_ffskyo2cut - ffo2)
            integraleo2 = np.trapz(diff_o2, llo2)
            integral_o2.append(integraleo2)
            _scaleo2.append(j)
        sh2o = _scaleh2o[np.argmin(integral_h2o)]
        so2 = _scaleo2[np.argmin(integral_o2)]
        telluric_features = ((np.array(ffskyh2o) * sh2o) +
                             1 - sh2o) + ((np.array(ffskyo2) * so2) + 1 - so2) - 1
        telluric_features = np.array([1] + list(telluric_features) + [1])
        llskyo2 = np.array([1000] + list(llskyo2) + [15000])
        telluric_features_cut = np.interp(xxstd, llskyo2, telluric_features)
        _imgout = 'atmo_' + imgstd

        data1, hdr = pyfits.getdata(imgstd, 0, header=True)
        data1[0] = np.array(telluric_features_cut)
        data1[1] = data1[1] / data1[1]
        data1[2] = data1[2] / data1[2]
        data1[3] = data1[3] / data1[3]
        ntt.util.delete(_imgout)
        pyfits.writeto(_imgout, np.float32(data1), hdr)
        ntt.util.delete(atlas_smooto2)
        ntt.util.delete(atlas_smooth2o)
        ntt.util.delete(imgout)
    else:
        _imgout = ''
        print '### telluric correction with model not possible '
    return _imgout
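# Stand-alone illustration of the scaling search used above: pick the factor j
# on the grid 0.64, 0.68, ..., 1.40 that minimizes the integrated absolute
# difference between the scaled atlas band and the observed standard.
import numpy as np

def best_scale(ll, ff_obs, ff_atlas):
    scales = 0.6 + 0.04 * np.arange(1, 21)
    integrals = [np.trapz(np.abs((ff_atlas * j + 1 - j) - ff_obs), ll)
                 for j in scales]
    return scales[np.argmin(integrals)]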
Exemplo n.º 45
0
def efoscspec1Dredu(files, _interactive, _ext_trace, _dispersionline, liststandard, listatmo0, _automaticex,
                    _verbose=False):
    # print "LOGX:: Entering `efoscspec1Dredu` method/function in
    # %(__file__)s" % globals()
    import ntt

    try:
        import pyfits
    except:
        from astropy.io import fits as pyfits

    import re
    import string
    import sys
    import os
    import numpy as np

    os.environ["PYRAF_BETA_STATUS"] = "1"
    _extinctdir = 'direc$standard/extinction/'
    _extinction = 'lasilla2.txt'
    _observatory = 'lasilla'
    import datetime

    now = datetime.datetime.now()
    datenow = now.strftime('20%y%m%d%H%M')
    MJDtoday = 55927 + (datetime.date.today() - datetime.date(2012, 01, 01)).days
    dv = ntt.dvex()
    scal = np.pi / 180.
    _gain = ntt.util.readkey3(ntt.util.readhdr(
        re.sub('\n', '', files[0])), 'gain')
    _rdnoise = ntt.util.readkey3(
        ntt.util.readhdr(re.sub('\n', '', files[0])), 'ron')
    std, rastd, decstd, magstd = ntt.util.readstandard(
        'standard_efosc_mab.txt')
    objectlist = {}
    for img in files:
        hdr = ntt.util.readhdr(img)
        img = re.sub('\n', '', img)
        ntt.util.correctcard(img)
        _ra = ntt.util.readkey3(hdr, 'RA')
        _dec = ntt.util.readkey3(hdr, 'DEC')
        _object = ntt.util.readkey3(hdr, 'object')
        _grism = ntt.util.readkey3(hdr, 'grism')
        _filter = ntt.util.readkey3(hdr, 'filter')
        _slit = ntt.util.readkey3(hdr, 'slit')
        dd = np.arccos(np.sin(_dec * scal) * np.sin(decstd * scal) + np.cos(_dec * scal) *
                       np.cos(decstd * scal) * np.cos((_ra - rastd) * scal)) * ((180 / np.pi) * 3600)
        if min(dd) < 100:
            _type = 'stdsens'
            ntt.util.updateheader(
                img, 0, {'stdname': [std[np.argmin(dd)], '']})
            ntt.util.updateheader(
                img, 0, {'magstd': [float(magstd[np.argmin(dd)]), '']})
        else:
            _type = 'obj'

        if _type not in objectlist:
            objectlist[_type] = {}
        if (_grism, _filter, _slit) not in objectlist[_type]:
            objectlist[_type][_grism, _filter, _slit] = [img]
        else:
            objectlist[_type][_grism, _filter, _slit].append(img)

    from pyraf import iraf
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.specred(_doprint=0)
    iraf.imutil(_doprint=0)
    toforget = ['imutil.imcopy', 'specred.sarith', 'specred.standard']
    for t in toforget:
        iraf.unlearn(t)
    iraf.specred.verbose = 'no'
    iraf.specred.dispaxi = 2
    iraf.set(direc=ntt.__path__[0] + '/')
    sens = {}
    print objectlist
    outputfile = []
    if 'obj' in objectlist.keys():
        tpe = 'obj'
    elif 'stdsens' in objectlist.keys():
        tpe = 'stdsens'
    else:
        sys.exit('error: no objects and no standards in the list')

    for setup in objectlist[tpe]:
        extracted = []
        listatmo = []
        if setup not in sens:
            sens[setup] = []
        if tpe == 'obj':
            print '\n### setup= ', setup, '\n### objects= ', objectlist['obj'][setup], '\n'
            for img in objectlist['obj'][setup]:
                #              hdr=readhdr(img)
                print '\n\n### next object= ', img, ' ', ntt.util.readkey3(ntt.util.readhdr(img), 'object'), '\n'
                if os.path.isfile(re.sub('.fits', '_ex.fits', img)):
                    if ntt.util.readkey3(ntt.util.readhdr(re.sub('.fits', '_ex.fits', img)), 'quality') == 'Rapid':
                        ntt.util.delete(re.sub('.fits', '_ex.fits', img))
                imgex = ntt.util.extractspectrum(img, dv, _ext_trace, _dispersionline, _interactive, 'obj',
                                                 automaticex=_automaticex)
                if not os.path.isfile(imgex):
                    sys.exit('### error, extraction not computed')
                if not ntt.util.readkey3(ntt.util.readhdr(imgex), 'shift') and \
                        ntt.util.readkey3(ntt.util.readhdr(imgex), 'shift') != 0.0:
                    # if not readkey3(readhdr(imgex),'shift'):
                    ntt.efoscspec1Ddef.checkwavestd(imgex, _interactive)
                extracted.append(imgex)
                if imgex not in outputfile:
                    outputfile.append(imgex)
                ntt.util.updateheader(
                    imgex, 0, {'FILETYPE': [22107, 'extracted 1D spectrum ']})
                ntt.util.updateheader(imgex, 0, {
                    'PRODCATG': ['SCIENCE.' +
                                 ntt.util.readkey3(ntt.util.readhdr(imgex), 'tech').upper(), 'Data product category']})
                ntt.util.updateheader(
                    imgex, 0, {'TRACE1': [img, 'Originating file']})
                if os.path.isfile('database/ap' + re.sub('_ex.fits', '', imgex)):
                    if 'database/ap' + re.sub('_ex.fits', '', imgex) not in outputfile:
                        outputfile.append(
                            'database/ap' + re.sub('_ex.fits', '', imgex))
            print '\n### all object with this setup extracted\n'
        if liststandard:
            standardlist = liststandard
            _type = 'stdfromdreducer'
        else:
            try:
                standardlist = objectlist['stdsens'][setup]
                _type = 'stdsens'
            except:
                standardlist = ''
                _type = ''
        if _type == 'stdfromdreducer' and len(extracted) >= 1:
            _outputsens2 = ntt.util.searchsens(extracted[0], standardlist)[0]
            print '\n### using standard from reducer ' + str(_outputsens2)
        elif _type not in ['stdsens', 'stdfromdreducer'] and len(extracted) >= 1:
            _outputsens2 = ntt.util.searchsens(extracted[0], '')[0]
            os.system('cp ' + _outputsens2 + ' .')
            _outputsens2 = string.split(_outputsens2, '/')[-1]
            print '\n### no standard in the list, using standard from archive'
        else:
            for simg in standardlist:
                print '\n###  standard for setup ' + \
                      str(setup) + ' = ', simg, ' ', ntt.util.readkey3(
                          ntt.util.readhdr(simg), 'object'), '\n'
                simgex = ntt.util.extractspectrum(
                    simg, dv, False, False, _interactive, 'std', automaticex=_automaticex)
                ntt.util.updateheader(
                    simgex, 0, {'FILETYPE': [22107, 'extracted 1D spectrum']})
                ntt.util.updateheader(simgex, 0, {
                    'PRODCATG': [
                        'SCIENCE.' + ntt.util.readkey3(ntt.util.readhdr(simgex), 'tech').upper(), 'Data product category']})
                ntt.util.updateheader(
                    simgex, 0, {'TRACE1': [simg, 'Originating file']})
                if not ntt.util.readkey3(ntt.util.readhdr(simgex), 'shift') and \
                        ntt.util.readkey3(ntt.util.readhdr(simgex), 'shift') != 0.0:
                    #                if not readkey3(readhdr(simgex),'shift'):
                    ntt.efoscspec1Ddef.checkwavestd(simgex, _interactive)
                atmofile = ntt.efoscspec1Ddef.telluric_atmo(
                    simgex)  # atmo file2
                ntt.util.updateheader(
                    atmofile, 0, {'TRACE1': [simgex, 'Originating file']})
                ntt.util.updateheader(
                    atmofile, 0, {'FILETYPE': [21211, 'telluric correction 1D spectrum ']})
                if tpe != 'obj' and atmofile not in outputfile:
                    outputfile.append(atmofile)
                if not listatmo0:
                    listatmo.append(atmofile)
                sens[setup].append(simgex)
                if simgex not in outputfile:
                    outputfile.append(simgex)
                if setup[0] == 'Gr13' and setup[1] == 'Free':
                    if os.path.isfile(re.sub('Free', 'GG495', simg)):
                        print '\n### extract standard frame with blocking filter to correct for second order contamination\n'
                        simg2 = re.sub('Free', 'GG495', simg)
                        simgex2 = ntt.util.extractspectrum(simg2, dv, False, False, _interactive, 'std',
                                                           automaticex=_automaticex)
                        ntt.util.updateheader(
                            simgex2, 0, {'FILETYPE': [22107, 'extracted 1D spectrum']})
                        ntt.util.updateheader(simgex2, 0, {
                            'PRODCATG': ['SCIENCE.' +
                                         ntt.util.readkey3(
                                             ntt.util.readhdr(simgex2), 'tech').upper(), 'Data product category']})
                        if not ntt.util.readkey3(ntt.util.readhdr(simgex2), 'shift') and \
                                ntt.util.readkey3(ntt.util.readhdr(simgex2), 'shift') != 0.0:
                            # if not readkey3(readhdr(simgex2),'shift'):
                            ntt.efoscspec1Ddef.checkwavestd(
                                simgex2, _interactive)
                        ntt.util.updateheader(
                            simgex2, 0, {'TRACE1': [simg2, 'Originating file']})
            print '\n### standard available: ', sens[setup]
            if tpe == 'obj':
                if len(sens[setup]) > 1:
                    goon = 'no'
                    while goon != 'yes':
                        stdused = raw_input(
                            '\n### more than one standard for this setup, which one do you want to use [' + sens[setup][
                                0] + '] ?')
                        if not stdused:
                            stdused = sens[setup][0]
                        if os.path.isfile(stdused):
                            goon = 'yes'
                else:
                    stdused = sens[setup][0]
                stdvec = [stdused]
            else:
                stdvec = sens[setup]
            for stdused in stdvec:
                stdusedclean = re.sub('_ex', '_clean', stdused)
                ntt.util.delete(stdusedclean)
                iraf.specred.sarith(
                    input1=stdused, op='/', input2=atmofile, output=stdusedclean, format='multispec')
                _outputsens2 = ntt.efoscspec1Ddef.sensfunction(
                    stdusedclean, 'spline3', 16, _interactive)
                ntt.util.updateheader(_outputsens2, 0, {'FILETYPE': [
                                      21212, 'sensitivity function']})
                ntt.util.updateheader(
                    _outputsens2, 0, {'TRACE1': [stdused, 'Originating file']})

                if setup[0] == 'Gr13' and setup[1] == 'Free':
                    if os.path.isfile(re.sub('Free', 'GG495', stdused)):
                        print '\n### compute sensitivity function of grism 13 with blocking filter ' \
                              'to correct for second order contamination \n'
                        stdused2 = re.sub('Free', 'GG495', stdused)
                        if not ntt.util.readkey3(ntt.util.readhdr(stdused2), 'STDNAME'):
                            ntt.util.updateheader(stdused2, 0, {
                                'STDNAME': [ntt.util.readkey3(ntt.util.readhdr(stdused), 'STDNAME'), '']})
                        atmofile2 = ntt.efoscspec1Ddef.telluric_atmo(
                            stdused2)  # atmo file2
                        stdusedclean2 = re.sub('_ex', '_clean', stdused2)
                        ntt.util.delete(stdusedclean2)
                        iraf.specred.sarith(input1=stdused2, op='/', input2=atmofile2, output=stdusedclean2,
                                            format='multispec')
                        _outputsens3 = ntt.efoscspec1Ddef.sensfunction(
                            stdusedclean2, 'spline3', 16, _interactive)
                        ntt.util.updateheader(_outputsens3, 0, {'FILETYPE': [
                                              21212, 'sensitivity function']})
                        ntt.util.updateheader(
                            _outputsens3, 0, {'TRACE1': [stdused2, 'Originating file']})
                        _outputsens2 = correctsens(_outputsens2, _outputsens3)

                if _outputsens2 not in outputfile:
                    outputfile.append(_outputsens2)
        if _outputsens2 and tpe == 'obj':
            ####################################################
            for img in objectlist['obj'][setup]:  # flux calibrate 2d images
                imgd = fluxcalib2d(img, _outputsens2)
                ntt.util.updateheader(
                    imgd, 0, {'FILETYPE': [22209, '2D wavelength and flux calibrated spectrum ']})
                ntt.util.updateheader(
                    imgd, 0, {'TRACE1': [img, 'Originating files']})
                iraf.hedit(imgd, 'PRODCATG', delete='yes',
                           update='yes', verify='no')
                if imgd not in outputfile:
                    outputfile.append(imgd)
            ####################################################
            #    flux calib in the standard way
            if not listatmo and listatmo0:
                listatmo = listatmo0[:]
            for _imgex in extracted:
                _airmass = ntt.util.readkey3(
                    ntt.util.readhdr(_imgex), 'airmass')
                _exptime = ntt.util.readkey3(
                    ntt.util.readhdr(_imgex), 'exptime')
                _imgf = re.sub('_ex.fits', '_f.fits', _imgex)
                ntt.util.delete(_imgf)
                qqq = iraf.specred.calibrate(input=_imgex, output=_imgf, sensiti=_outputsens2, extinct='yes',
                                             flux='yes',
                                             extinction=_extinctdir + _extinction, observatory=_observatory,
                                             airmass=_airmass, ignorea='yes', exptime=_exptime, fnu='no')
                hedvec = {'SENSFUN': [_outputsens2, ''],
                          'FILETYPE': [22208, '1D wavelength and flux calibrated spectrum', ''],
                          #                     'SNR':[ntt.util.StoN(_imgf,50),'Average signal to noise ratio per pixel'],
                          'SNR': [ntt.util.StoN2(_imgf, False), 'Average signal to noise ratio per pixel'],
                          'BUNIT': ['erg/cm2/s/Angstrom', 'Physical unit of array values'],
                          'TRACE1': [_imgex, 'Originating file'],
                          'ASSON1': [re.sub('_f.fits', '_2df.fits', _imgf), 'Name of associated file'],
                          'ASSOC1': ['ANCILLARY.2DSPECTRUM', 'Category of associated file']}
                ntt.util.updateheader(_imgf, 0, hedvec)
                if _imgf not in outputfile:
                    outputfile.append(_imgf)
                if listatmo:
                    atmofile = ntt.util.searcharc(_imgex, listatmo)[0]
                    if atmofile:
                        _imge = re.sub('_f.fits', '_e.fits', _imgf)
                        ntt.util.delete(_imge)
                        iraf.specred.sarith(input1=_imgf, op='/', input2=atmofile, output=_imge, w1='INDEF', w2='INDEF',
                                            format='multispec')
                        try:
                            iraf.imutil.imcopy(
                                input=_imgf + '[*,1,2]', output=_imge + '[*,1,2]', verbose='no')
                        except:
                            pass
                        try:
                            iraf.imutil.imcopy(
                                input=_imgf + '[*,1,3]', output=_imge + '[*,1,3]', verbose='no')
                        except:
                            pass
                        try:
                            iraf.imutil.imcopy(
                                input=_imgf + '[*,1,4]', output=_imge + '[*,1,4]', verbose='no')
                        except:
                            pass
                        if _imge not in outputfile:
                            outputfile.append(_imge)
                        ntt.util.updateheader(
                            _imge, 0, {'FILETYPE': [22210, '1D, wave, flux calib, telluric corr.']})
                        if atmofile not in outputfile:
                            outputfile.append(atmofile)
                        ntt.util.updateheader(
                            _imge, 0, {'ATMOFILE': [atmofile, '']})
                        ntt.util.updateheader(
                            _imge, 0, {'TRACE1': [_imgf, 'Originating file']})
                        imgin = _imge
                    else:
                        imgin = _imgf
                else:
                    imgin = _imgf
                imgasci = re.sub('.fits', '.asci', imgin)

                ntt.util.delete(imgasci)
                iraf.onedspec(_doprint=0)
                iraf.onedspec.wspectext(
                    imgin + '[*,1,1]', imgasci, header='no')
                if imgasci not in outputfile:
                    outputfile.append(imgasci)

    print '\n### adding keywords for phase 3 ....... '
    for img in outputfile:
        if str(img)[-5:] == '.fits':
            try:
                ntt.util.phase3header(img)  # phase 3 definitions
                ntt.util.updateheader(img, 0, {'quality': ['Final', '']})
            except:
                print 'Warning: ' + img + ' is not a fits file'
            try:
                if int(re.sub('\.', '', str(pyfits.__version__))[:2]) <= 30:
                    aa = 'HIERARCH '
                else:
                    aa = ''
            except:
                aa = ''

            imm = pyfits.open(img, mode='update')
            hdr = imm[0].header
            if aa + 'ESO DPR CATG' in hdr:
                hdr.pop(aa + 'ESO DPR CATG')
            if aa + 'ESO DPR TECH' in hdr:
                hdr.pop(aa + 'ESO DPR TECH')
            if aa + 'ESO DPR TYPE' in hdr:
                hdr.pop(aa + 'ESO DPR TYPE')
            imm.flush()
            imm.close()

    print outputfile
    reduceddata = ntt.rangedata(outputfile)
    f = open('logfile_spec1d_' + str(reduceddata) +
             '_' + str(datenow) + '.raw.list', 'w')
    for img in outputfile:
        try:
            f.write(ntt.util.readkey3(ntt.util.readhdr(img), 'arcfile') + '\n')
        except:
            pass
    f.close()
    return outputfile, 'logfile_spec1d_' + str(reduceddata) + '_' + str(datenow) + '.raw.list'
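# Hypothetical call sketch: run the 1D reduction on a list of pre-reduced 2D
# spectra (one file name per line), interactively, letting the task pick the
# standards and telluric models from the data themselves.
files = open('list_of_2dspectra.txt').readlines()
outputfile, logfile = efoscspec1Dredu(files, _interactive=True, _ext_trace=False,
                                      _dispersionline=False, liststandard='',
                                      listatmo0='', _automaticex=False)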
Exemplo n.º 46
0
import matplotlib.pyplot as plt
from datetime import date
import operator
import time
import traceback
from lmfit import minimize, Parameters, report_fit

#import cosmics

iraf.noao(_doprint=0,Stdout="/dev/null")
iraf.rv(_doprint=0,Stdout="/dev/null")
iraf.imred(_doprint=0,Stdout="/dev/null")
iraf.ccdred(_doprint=0,Stdout="/dev/null")
iraf.images(_doprint=0,Stdout="/dev/null")
iraf.immatch(_doprint=0,Stdout="/dev/null")
iraf.onedspec(_doprint=0,Stdout="/dev/null")
iraf.twodspec(_doprint=0,Stdout="/dev/null")
iraf.apextract(_doprint=0,Stdout="/dev/null")
iraf.imutil(_doprint=0,Stdout="/dev/null")
iraf.echelle(_doprint=0,Stdout="/dev/null")
iraf.astutil(_doprint=0,Stdout="/dev/null")
iraf.apextract.dispaxi=1
iraf.echelle.dispaxi=1
#fixes a bug with latest versions of iraf
iraf.ccdred.instrum='blank.txt'


os.environ['PYRAF_BETA_STATUS'] = '1'

# REFS NEW: ecEC59550 ecEC59758 ecEC59795 ecEC59842 ecEC59844 ecEC59864 ecEC59866 ecEC59881 ecEC59883 ecEC59885
Exemplo n.º 47
0
def efoscfastredu(imglist, _listsens, _listarc, _ext_trace, _dispersionline,
                  _cosmic, _interactive):
    # print "LOGX:: Entering `efoscfastredu` method/function in %(__file__)s"
    # % globals()
    import string
    import os
    import re
    import sys
    os.environ["PYRAF_BETA_STATUS"] = "1"
    try:
        from astropy.io import fits as pyfits
    except:
        import pyfits
    from ntt.util import readhdr, readkey3
    import ntt
    import numpy as np
    dv = ntt.dvex()
    scal = np.pi / 180.
    if not _interactive:
        _interactive = False
        _inter = 'NO'
    else:
        _inter = 'YES'
    from pyraf import iraf

    iraf.noao(_doprint=0, Stdout=0)
    iraf.imred(_doprint=0, Stdout=0)
    iraf.ccdred(_doprint=0, Stdout=0)
    iraf.twodspec(_doprint=0, Stdout=0)
    iraf.longslit(_doprint=0, Stdout=0)
    iraf.onedspec(_doprint=0, Stdout=0)
    iraf.specred(_doprint=0, Stdout=0)
    toforget = [
        'ccdproc', 'imcopy', 'specred.apall', 'longslit.identify',
        'longslit.reidentify', 'specred.standard', 'longslit.fitcoords',
        'onedspec.wspectext'
    ]
    for t in toforget:
        iraf.unlearn(t)
    iraf.ccdred.verbose = 'no'  # not print steps
    iraf.specred.verbose = 'no'  # not print steps
    iraf.ccdproc.darkcor = 'no'
    iraf.ccdproc.fixpix = 'no'
    iraf.ccdproc.flatcor = 'no'
    iraf.ccdproc.zerocor = 'no'
    iraf.ccdproc.ccdtype = ''
    _gain = ntt.util.readkey3(ntt.util.readhdr(imglist[0]), 'gain')
    _ron = ntt.util.readkey3(ntt.util.readhdr(imglist[0]), 'ron')
    iraf.specred.apall.readnoi = _ron
    iraf.specred.apall.gain = _gain
    iraf.specred.dispaxi = 2
    iraf.longslit.dispaxi = 2
    iraf.longslit.mode = 'h'
    iraf.specred.mode = 'h'
    iraf.noao.mode = 'h'
    iraf.ccdred.instrument = "ccddb$kpno/camera.dat"
    iraf.set(direc=ntt.__path__[0] + '/')
    for img in imglist:
        hdr = ntt.util.readhdr(img)
        _tech = ntt.util.readkey3(hdr, 'tech')
        if _tech != 'SPECTRUM':
            sys.exit('error: ' + str(img) + ' is not a spectrum ')
        print '\n####  image name = ' + img + '\n'
        _grism0 = readkey3(hdr, 'grism')
        _filter0 = readkey3(hdr, 'filter')
        _slit0 = readkey3(hdr, 'slit')
        _object0 = readkey3(hdr, 'object')
        _date0 = readkey3(hdr, 'date-night')
        setup = (_grism0, _filter0, _slit0)
        _biassec0 = '[3:1010,1026:1029]'
        if _grism0 == 'Gr16':
            _trimsec0 = '[100:950,1:950]'
        elif _grism0 == 'Gr13':
            if _filter0 == 'Free':
                _trimsec0 = '[100:950,1:1015]'
            elif _filter0 == 'GG495':
                _trimsec0 = '[100:950,208:1015]'
            elif _filter0 == 'OG530':
                _trimsec0 = '[100:950,300:1015]'
        elif _grism0 == 'Gr11':
            _trimsec0 = '[100:950,5:1015]'
        else:
            _trimsec0 = '[100:950,5:1015]'
        _object0 = re.sub(' ', '', _object0)
        _object0 = re.sub('/', '_', _object0)
        nameout0 = 't' + str(_object0) + '_' + str(_date0)
        for _set in setup:
            nameout0 = nameout0 + '_' + _set
        nameout0 = ntt.util.name_duplicate(img, nameout0, '')
        timg = nameout0
        if os.path.isfile(timg):
            os.system('rm -rf ' + timg)
        iraf.imcopy(img, output=timg)
        iraf.ccdproc(timg,
                     output='',
                     overscan='no',
                     trim='yes',
                     zerocor="no",
                     flatcor="no",
                     readaxi='column',
                     trimsec=str(_trimsec0),
                     biassec=_biassec0,
                     Stdout=1)
        img = timg
        if _listarc:
            arcfile = ntt.util.searcharc(img, _listarc)[0]
        else:
            arcfile = ''
        if not arcfile:
            arcfile = ntt.util.searcharc(img, '')[0]
        else:
            iraf.ccdproc(arcfile,
                         output='t' + arcfile,
                         overscan='no',
                         trim='yes',
                         zerocor="no",
                         flatcor="no",
                         readaxi='column',
                         trimsec=str(_trimsec0),
                         biassec=str(_biassec0),
                         Stdout=1)
            arcfile = 't' + arcfile

        if _cosmic:
            # cosmic-ray rejection with lacos
            ntt.cosmics.lacos(img,
                              output='',
                              gain=_gain,
                              readn=_ron,
                              xorder=9,
                              yorder=9,
                              sigclip=4.5,
                              sigfrac=0.5,
                              objlim=1,
                              verbose=True,
                              interactive=False)
            print '\n### cosmic ray rejection ........ done '

        if not arcfile:
            print '\n### warning: no arc file found \n exit '
        else:
            arcref = ntt.util.searcharc(img, '')[0]
            if arcfile[0] == '/':
                os.system('cp ' + arcfile + ' ' +
                          string.split(arcfile, '/')[-1])
                arcfile = string.split(arcfile, '/')[-1]
            arcref = string.split(arcref, '/')[-1]
            if arcref:
                os.system('cp ' + arcref + ' .')
                arcref = string.split(arcref, '/')[-1]
                if not os.path.isdir('database/'):
                    os.mkdir('database/')
                if os.path.isfile(
                        ntt.util.searcharc(img, '')[1] + '/database/id' +
                        re.sub('.fits', '', arcref)):
                    os.system('cp ' + ntt.util.searcharc(img, '')[1] +
                              '/database/id' + re.sub('.fits', '', arcref) +
                              ' database/')
                iraf.longslit.reidentify(
                    referenc=arcref,
                    images=arcfile,
                    interac=_inter,
                    section='column 10',
                    coordli='direc$standard/ident/Lines_HgCdHeNeAr600.dat',
                    overrid='yes',
                    step=0,
                    newaps='no',
                    nsum=5,
                    nlost=2,
                    mode='h',
                    verbose='no')
            else:
                iraf.longslit.identify(
                    images=arcfile,
                    section='column 10',
                    coordli='direc$standard/ident/Lines_HgCdHeNeAr600.dat',
                    nsum=10,
                    fwidth=7,
                    order=3,
                    mode='h')
            iraf.longslit.reidentify(
                referenc=arcfile,
                images=arcfile,
                interac='NO',
                section='column 10',
                coordli='direc$standard/ident/Lines_HgCdHeNeAr600.dat',
                overrid='yes',
                step=10,
                newaps='yes',
                nsum=5,
                nlost=2,
                mode='h',
                verbose='no')
            qqq = iraf.longslit.fitcoords(images=re.sub('.fits', '', arcfile),
                                          fitname=re.sub('.fits', '', arcfile),
                                          interac='no',
                                          combine='yes',
                                          databas='database',
                                          function='legendre',
                                          yorder=4,
                                          logfile='logfile',
                                          plotfil='',
                                          mode='h')
            iraf.specred.transform(input=img,
                                   output=img,
                                   minput='',
                                   fitnames=re.sub('.fits', '', arcfile),
                                   databas='database',
                                   x1='INDEF',
                                   x2='INDEF',
                                   y1='INDEF',
                                   y2='INDEF',
                                   flux='yes',
                                   mode='h',
                                   logfile='logfile')
            # ######################  check wavelength calibration ############
            _skyfile = ntt.__path__[0] + '/standard/ident/sky_' + setup[
                0] + '_' + setup[1] + '.fits'
            shift = ntt.efoscspec2Ddef.skyfrom2d(img, _skyfile)
            print '\n###     wavelength check performed ...... spectrum shifted by ' + str(
                shift) + ' Angstrom \n'
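            # apply the measured sky-line shift to the wavelength zero point (CRVAL2)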
            zro = pyfits.open(img)[0].header.get('CRVAL2')
            ntt.util.updateheader(img, 0, {'CRVAL2': [zro + int(shift), '']})
            std, rastd, decstd, magstd = ntt.util.readstandard(
                'standard_efosc_mab.txt')
            hdrt = readhdr(img)
            _ra = readkey3(hdrt, 'RA')
            _dec = readkey3(hdrt, 'DEC')
            _object = readkey3(hdrt, 'object')
            dd = np.arccos(
                np.sin(_dec * scal) * np.sin(decstd * scal) +
                np.cos(_dec * scal) * np.cos(decstd * scal) * np.cos(
                    (_ra - rastd) * scal)) * ((180 / np.pi) * 3600)
            if min(dd) < 100:
                _type = 'stdsens'
                ntt.util.updateheader(img, 0,
                                      {'stdname': [std[np.argmin(dd)], '']})
                ntt.util.updateheader(
                    img, 0, {'magstd': [float(magstd[np.argmin(dd)]), '']})
            else:
                _type = 'obj'
            print '\n###      EXTRACTION USING IRAF TASK APALL \n'
            result = []
            if _type == 'obj':
                imgex = ntt.util.extractspectrum(img, dv, _ext_trace,
                                                 _dispersionline, _interactive,
                                                 _type)
                ntt.util.updateheader(
                    imgex, 0, {'FILETYPE': [22107, 'extracted 1D spectrum ']})
                ntt.util.updateheader(
                    imgex, 0, {
                        'PRODCATG': [
                            'SCIENCE.' +
                            readkey3(readhdr(imgex), 'tech').upper(),
                            'Data product category'
                        ]
                    })
                ntt.util.updateheader(imgex, 0, {'TRACE1': [img, '']})
                result.append(imgex)
                if _listsens:
                    sensfile = ntt.util.searchsens(img, _listsens)[0]
                else:
                    sensfile = ''
                if not sensfile:
                    sensfile = ntt.util.searchsens(img, '')[0]
                if sensfile:
                    imgf = re.sub('.fits', '_f.fits', img)
                    _extinctdir = 'direc$standard/extinction/'
                    _extinction = 'extinction_lasilla.dat'
                    _observatory = 'lasilla'
                    _exptime = readkey3(hdrt, 'exptime')
                    _airmass = readkey3(hdrt, 'airmass')
                    ntt.util.delete(imgf)
                    iraf.specred.calibrate(input=imgex,
                                           output=imgf,
                                           sensiti=sensfile,
                                           extinct='yes',
                                           flux='yes',
                                           ignorea='yes',
                                           extinction=_extinctdir +
                                           _extinction,
                                           observatory=_observatory,
                                           airmass=_airmass,
                                           exptime=_exptime,
                                           fnu='no')
                    hedvec = {
                        'SENSFUN': [
                            string.split(sensfile, '/')[-1],
                            'sensitivity function'
                        ],
                        'FILETYPE':
                        [22208, '1D wavelength and flux calibrated spectrum '],
                        'SNR':
                        [ntt.util.StoN2(imgf, False), 'Average S/N ratio'],
                        'BUNIT':
                        ['erg/cm2/s/Angstrom', 'Flux Calibration Units'],
                        'TRACE1': [imgex, '']
                    }
                    ntt.util.updateheader(imgf, 0, hedvec)
                    imgout = imgf
                    imgd = ntt.efoscspec1Ddef.fluxcalib2d(img, sensfile)
                    ntt.util.updateheader(
                        imgd, 0, {
                            'FILETYPE': [
                                22209,
                                '2D wavelength and flux calibrated spectrum '
                            ]
                        })
                    ntt.util.updateheader(imgd, 0, {'TRACE1': [img, '']})
                    imgasci = re.sub('.fits', '.asci', imgout)
                    ntt.util.delete(imgasci)
                    iraf.onedspec.wspectext(imgout + '[*,1,1]',
                                            imgasci,
                                            header='no')
                    result = result + [imgout, imgd, imgasci]
            else:
                imgex = ntt.util.extractspectrum(img, dv, _ext_trace,
                                                 _dispersionline, _interactive,
                                                 'std')
                imgout = ntt.efoscspec1Ddef.sensfunction(
                    imgex, 'spline3', 6, _inter)
                result = result + [imgout]

    for img in result:
        if img[-5:] == '.fits':
            ntt.util.phase3header(img)  # phase 3 definitions
            ntt.util.airmass(img)  # phase 3 definitions
            ntt.util.updateheader(
                img, 0, {'quality': ['Rapid', 'Final or Rapid reduction']})
    return result
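
# Hedged aside, not part of the pipeline above: the 'dd' expression used to
# match images against the standard-star catalogue is the spherical law of
# cosines for angular separation, evaluated in arcseconds. A self-contained
# numpy sketch of the same calculation (the coordinates below are made up):
import numpy as np

def angsep_arcsec(ra1, dec1, ra2, dec2):
    """Angular separation between two sky positions (degrees in, arcsec out)."""
    scal = np.pi / 180.
    cosd = (np.sin(dec1 * scal) * np.sin(dec2 * scal) +
            np.cos(dec1 * scal) * np.cos(dec2 * scal) *
            np.cos((ra1 - ra2) * scal))
    return np.arccos(np.clip(cosd, -1.0, 1.0)) * (180. / np.pi) * 3600.

# e.g. angsep_arcsec(150.00, 2.20, 150.01, 2.21) is roughly 51 arcsec,
# comfortably inside the 100 arcsec matching radius used above.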
Exemplo n.º 48
0
def telluric_correction(targetdir,telluricdir,stype):

    """
    Removes telluric lines from 1D spectrum

    Assumes both the science and telluric spectra have been extracted and
    wavelength calibrated, i.e. the file dimcomb.ms.fits exists in the
    telluric directory.

    """

    print 'Target directory is ' + targetdir

    if os.path.exists( os.path.join(targetdir,'dimcomb.ms.fits') ):
        print "Wavelength calibrated target spectrum 'dimcomb.ms.fits' exists"

    print 'Telluric directory is ' + telluricdir

    if os.path.exists( os.path.join(telluricdir,'dimcomb.ms.fits') ):
        print "Wavelength calibrated telluric spectrum 'dimcomb.ms.fits' exists"

    print 'Generating black-body spectrum...'

    print 'Telluric star type ' + stype
    # effective temperatures from http://www.gemini.edu/sciops/instruments/nir/photometry/temps_colors.txt
    if stype == 'A0V': bbtemp = 9480.0
    elif stype == 'A1V': bbtemp = 9230.0
    elif stype == 'A2V': bbtemp = 8810.0
    elif stype == 'A3V': bbtemp = 8270.0
    elif stype == 'A4V': bbtemp = 8200.0
    elif stype == 'A5V': bbtemp = 8160.0
    else:
        raise ValueError('Unknown telluric star type: ' + stype)

    print 'Generating blackbody with temperature = ' + str(bbtemp) + ' K'

    iraf.noao(_doprint=0)
    iraf.artdata (_doprint=0)

    if os.path.exists( os.path.join(telluricdir,'blackbody.fits') ):
        os.remove( os.path.join(telluricdir,'blackbody.fits') )

    iraf.mk1dspec.setParam('input', os.path.join(telluricdir,'blackbody.fits') )
    iraf.mk1dspec.setParam('title','blackbody')
    iraf.mk1dspec.setParam('ncols',1024)
    iraf.mk1dspec.setParam('wstart',13900)
    iraf.mk1dspec.setParam('wend',24000)
    iraf.mk1dspec.setParam('temperature',bbtemp)

    iraf.mk1dspec()

    print 'Generated blackbody spectrum'

    # Divide telluric star spectrum by black-body

    if os.path.exists( os.path.join(telluricdir,'tdimcomb.ms.fits') ):
        os.remove( os.path.join(telluricdir,'tdimcomb.ms.fits') )

    iraf.onedspec(_doprint=0)

    """
    To divide by the blackbody I turn off cross-correlation, which would only
    work if both spectra had similar features. I also don't want to scale the
    spectrum (which scales the airmass using Beer's law) or shift it. If the
    temperature is correct a straight division should work; if not, the
    temperature should be changed.
    """

    iraf.telluric.setParam('input',os.path.join(telluricdir,'dimcomb.ms.fits') ) # List of input spectra to correct
    iraf.telluric.setParam('output',os.path.join(telluricdir,'tdimcomb.ms.fits') ) # List of output corrected spectra
    iraf.telluric.setParam('cal',os.path.join(telluricdir,'blackbody.fits') ) # List of telluric calibration spectra
    iraf.telluric.setParam('answer','yes') # Search interactively?
    iraf.telluric.setParam('xcorr', 'no') # Cross correlate for shift?
    iraf.telluric.setParam('tweakrms', 'no') # Tweak to minimise rms?
    iraf.telluric.setParam('interactive', 'yes') # Interactive?
    iraf.telluric.setParam('threshold',0.0)  # Threshold for calibration
    iraf.telluric.setParam('offset',1) # Displayed offset between spectra
    iraf.telluric.setParam('sample','15000:18000,19700:23800')
    iraf.telluric.setParam('dshift',5.0)
    iraf.telluric.setParam('smooth',3.0)

    iraf.telluric()

    """
    When your calibration spectrum has zero or negative intensity values,
    you have to set the "threshold" parameter accordingly. As explained in
    the help page for the TELLURIC task, you can think of the "threshold"
    value as the minimum intensity value TELLURIC will accept from your
    calibration spectra. Any intensity value lower than the threshold value
    will be replaced by the threshold.

    """

    """
    Cross-correlation is turned off here since its behaviour in this setup is
    unclear. Tweaking is enabled for the target correction below, but it does
    not seem to change much; it is also still unclear whether the airmass is
    being handled correctly.
    """

    print 'Now correcting target spectrum...'

    if os.path.exists( os.path.join(targetdir,'tdimcomb+bkgd.ms.fits') ):
        os.remove( os.path.join(targetdir,'tdimcomb+bkgd.ms.fits') )

    iraf.telluric.setParam('input',os.path.join(targetdir,'dimcomb+bkgd.ms.fits') ) # List of input spectra to correct
    iraf.telluric.setParam('output',os.path.join(targetdir,'tdimcomb+bkgd.ms.fits') ) # List of output corrected spectra
    iraf.telluric.setParam('cal',os.path.join(telluricdir,'tdimcomb.ms.fits') ) # List of telluric calibration spectra
    iraf.telluric.setParam('answer','yes') # Search interactively?
    iraf.telluric.setParam('threshold',0.0)
    iraf.telluric.setParam('xcorr', 'no') # Cross correlate for shift?
    iraf.telluric.setParam('tweakrms', 'yes') # Tweak to minimise rms?
    iraf.telluric.setParam('interactive', 'yes') # Interactive?
    iraf.telluric.setParam('offset',6) # Displayed offset between spectra

    hdulist = fits.open(os.path.join(targetdir,'imcomb.ms.fits'))
    hdr = hdulist[0].header
    hdulist.close()

    iraf.telluric.setParam('airmass',hdr['AIRMASS'])

    iraf.telluric()


    return None
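
# Hedged aside, not part of the routine above: the artdata.mk1dspec call builds
# a blackbody over 13900-24000 Angstrom for the chosen temperature. A quick
# numpy sketch of the same Planck curve, useful as a sanity check of the shape
# (pure numpy; the normalisation is arbitrary):
import numpy as np

def planck_blambda(wave_angstrom, temp_k):
    """Planck B_lambda (arbitrary normalisation) at the given wavelengths."""
    h = 6.62607015e-27   # erg s
    c = 2.99792458e10    # cm / s
    kb = 1.380649e-16    # erg / K
    wl = wave_angstrom * 1e-8  # Angstrom -> cm
    return (2.0 * h * c ** 2 / wl ** 5) / np.expm1(h * c / (wl * kb * temp_k))

wave = np.linspace(13900., 24000., 1024)
bb_shape = planck_blambda(wave, 9480.0)   # A0V temperature from the table above
bb_shape /= bb_shape.max()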
Exemplo n.º 49
0
def reduce(imglist, files_arc, files_flat, _cosmic, _interactive_extraction,
           _arc):
    import string
    import os
    import re
    import sys
    import pdb
    os.environ["PYRAF_BETA_STATUS"] = "1"
    try:
        from astropy.io import fits as pyfits
    except:
        import pyfits
    import numpy as np
    import util
    import instruments
    import combine_sides as cs
    import cosmics
    from pyraf import iraf

    dv = util.dvex()
    scal = np.pi / 180.

    if not _interactive_extraction:
        _interactive = False
    else:
        _interactive = True

    if not _arc:
        _arc_identify = False
    else:
        _arc_identify = True

    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    iraf.twodspec(_doprint=0)
    iraf.longslit(_doprint=0)
    iraf.onedspec(_doprint=0)
    iraf.specred(_doprint=0)
    iraf.disp(inlist='1', reference='1')

    toforget = [
        'ccdproc', 'imcopy', 'specred.apall', 'longslit.identify',
        'longslit.reidentify', 'specred.standard', 'longslit.fitcoords',
        'onedspec.wspectext'
    ]
    for t in toforget:
        iraf.unlearn(t)
    iraf.ccdred.verbose = 'no'
    iraf.specred.verbose = 'no'
    iraf.ccdproc.darkcor = 'no'
    iraf.ccdproc.fixpix = 'no'
    iraf.ccdproc.flatcor = 'no'
    iraf.ccdproc.zerocor = 'no'
    iraf.ccdproc.ccdtype = ''

    iraf.longslit.mode = 'h'
    iraf.specred.mode = 'h'
    iraf.noao.mode = 'h'
    iraf.ccdred.instrument = "ccddb$kpno/camera.dat"

    list_arc_b = []
    list_arc_r = []

    for arcs in files_arc:
        hdr = util.readhdr(arcs)
        br, inst = instruments.blue_or_red(arcs)

        if br == 'blue':
            list_arc_b.append(arcs)
        elif br == 'red':
            list_arc_r.append(arcs)
        else:
            errStr = '{} '.format(str(util.readkey3(hdr, 'VERSION')))
            errStr += 'not in database'
            print(errStr)
            sys.exit()

    asci_files = []
    newlist = [[], []]

    print('\n### images to reduce :', imglist)
    #raise TypeError
    for img in imglist:
        if 'b' in img:
            newlist[0].append(img)
        elif 'r' in img:
            newlist[1].append(img)

    if len(newlist[1]) < 1:
        newlist = newlist[:-1]
    elif len(newlist[0]) < 1:
        newlist = newlist[1:]
    else:
        sides = raw_input("Reduce which side? ([both]/b/r): ")
        if sides == 'b':
            newlist = newlist[:-1]
        elif sides == 'r':
            newlist = newlist[1:]

    for imgs in newlist:
        hdr = util.readhdr(imgs[0])
        br, inst = instruments.blue_or_red(imgs[0])
        if br == 'blue':
            flat_file = '../RESP_blue'
        elif br == 'red':
            flat_file = '../RESP_red'
        else:
            errStr = 'Not in instrument list'
            print(errStr)
            sys.exit()

        iraf.specred.dispaxi = inst.get('dispaxis')
        iraf.longslit.dispaxi = inst.get('dispaxis')

        _gain = inst.get('gain')
        _ron = inst.get('read_noise')
        iraf.specred.apall.readnoi = _ron
        iraf.specred.apall.gain = _gain

        _object0 = util.readkey3(hdr, 'OBJECT')
        _date0 = util.readkey3(hdr, 'DATE-OBS')

        _object0 = re.sub(' ', '', _object0)
        _object0 = re.sub('/', '_', _object0)
        nameout0 = str(_object0) + '_' + inst.get('name') + '_' + str(_date0)

        nameout0 = util.name_duplicate(imgs[0], nameout0, '')
        timg = nameout0
        print('\n### now processing :', timg, ' for -> ', inst.get('name'))
        if len(imgs) > 1:
            img_str = ''
            for i in imgs:
                img_str = img_str + i + ','
            iraf.imcombine(img_str, output=timg)
        else:
            img = imgs[0]
            if os.path.isfile(timg):
                os.system('rm -rf ' + timg)
            iraf.imcopy(img, output=timg)

        # flat-field correction only; as noted, this could just as well be done by a direct division by the response frame
        iraf.ccdproc(timg,
                     output='',
                     overscan='no',
                     trim='no',
                     zerocor="no",
                     flatcor="yes",
                     readaxi='line',
                     flat=flat_file,
                     Stdout=1)

        img = timg

        #raw_input("Press Enter to continue...")
        if _cosmic:
            print('\n### starting cosmic removal')

            array, header = cosmics.fromfits(img)
            c = cosmics.cosmicsimage(array,
                                     gain=inst.get('gain'),
                                     readnoise=inst.get('read_noise'),
                                     sigclip=5,
                                     sigfrac=0.5,
                                     objlim=2.0)
            c.run(maxiter=5)
            cosmics.tofits('cosmic_' + img, c.cleanarray, header)
            img = 'cosmic_' + img

            print('\n### cosmic removal finished')
        else:
            print(
                '\n### no cosmic removal; keeping the flat-fielded image for inspection'
            )

        if inst.get('arm') == 'blue' and len(list_arc_b) > 0:
            arcfile = list_arc_b[0]
        elif inst.get('arm') == 'red' and len(list_arc_r) > 0:
            arcfile = list_arc_r[0]
        else:
            arcfile = None

        if arcfile is not None and not arcfile.endswith(".fits"):
            arcfile = arcfile + '.fits'

        if not os.path.isdir('database/'):
            os.mkdir('database/')

        if _arc_identify:
            os.system('cp ' + arcfile + ' .')
            arcfile = string.split(arcfile, '/')[-1]
            arc_ex = re.sub('.fits', '.ms.fits', arcfile)

            arcref = inst.get('archive_arc_extracted')
            arcref_img = string.split(arcref, '/')[-1]
            arcref_img = arcref_img.replace('.ms.fits', '')
            arcrefid = inst.get('archive_arc_extracted_id')
            os.system('cp ' + arcref + ' .')
            arcref = string.split(arcref, '/')[-1]
            os.system('cp ' + arcrefid + ' ./database')

            aperture = inst.get('archive_arc_aperture')
            os.system('cp ' + aperture + ' ./database')

            print('\n###  arcfile : ', arcfile)
            print('\n###  arcfile extraction : ', arc_ex)
            print('\n###  arc reference : ', arcref)

            # read for some meta data to get the row right
            tmpHDU = pyfits.open(arcfile)
            header = tmpHDU[0].header
            try:
                spatialBin = int(header['binning'].split(',')[0])
            except KeyError:
                spatialBin = 1
            apLine = 700 // spatialBin

            iraf.specred.apall(arcfile,
                               output=arc_ex,
                               ref=arcref_img,
                               line=apLine,
                               nsum=10,
                               interactive='no',
                               extract='yes',
                               find='yes',
                               nfind=1,
                               format='multispec',
                               trace='no',
                               back='no',
                               recen='no')

            iraf.longslit.reidentify(referenc=arcref,
                                     images=arc_ex,
                                     interac='NO',
                                     section=inst.get('section'),
                                     coordli=inst.get('line_list'),
                                     shift='INDEF',
                                     search='INDEF',
                                     mode='h',
                                     verbose='YES',
                                     step=0,
                                     nsum=5,
                                     nlost=2,
                                     cradius=10,
                                     refit='yes',
                                     overrid='yes',
                                     newaps='no')

        print('\n### extraction using apall')
        result = []
        hdr_image = util.readhdr(img)
        _type = util.readkey3(hdr_image, 'object')

        if (_type.startswith("arc") or _type.startswith("dflat")
                or _type.startswith("Dflat") or _type.startswith("Dbias")
                or _type.startswith("Bias")):
            print('\n### warning: this image looks like a calibration frame, not a science exposure \n exit ')
            sys.exit()
        else:
            imgex = util.extractspectrum(img, dv, inst, _interactive, 'obj')
            print('\n### applying wavelength solution')
            print(arc_ex)
            iraf.disp(inlist=imgex, reference=arc_ex)

        result = result + [imgex] + [timg]

        # asci_files.append(imgasci)
        if not os.path.isdir(_object0 + '_ex/'):
            os.mkdir(_object0 + '_ex/')

        if not _arc_identify:
            util.delete(arcref)
        else:
            util.delete(arcfile)

        util.delete(arc_ex)
        util.delete(img)
        util.delete(imgex)
        util.delete(arcref)
        util.delete('logfile')
        #if _cosmic:
        #util.delete(img[7:])
        #util.delete("cosmic_*")

        os.system('mv ' + 'd' + imgex + ' ' + _object0 + '_ex/')

        use_sens = raw_input('Use archival flux calibration? [y]/n ')
        if use_sens.strip().lower() not in ('n', 'no'):
            sensfile = inst.get('archive_sens')
            os.system('cp ' + sensfile + ' ' + _object0 + '_ex/')
            bstarfile = inst.get('archive_bstar')
            os.system('cp ' + bstarfile + ' ' + _object0 + '_ex/')

    return result
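
# Hedged aside, not part of the function above: the blue/red split uses a bare
# substring test ('b' in img / 'r' in img), which also matches those letters
# anywhere else in the filename. A stricter sketch, assuming a hypothetical
# naming convention where the arm letter is the first character of the basename:
import os

def split_by_arm(imglist):
    """Partition image names into (blue, red) lists by leading arm letter."""
    blue, red = [], []
    for img in imglist:
        base = os.path.basename(img)
        if base.startswith('b'):
            blue.append(img)
        elif base.startswith('r'):
            red.append(img)
    return blue, red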
Exemplo n.º 50
0
def efoscspec1Dredu(files,
                    _interactive,
                    _ext_trace,
                    _dispersionline,
                    liststandard,
                    listatmo0,
                    _automaticex,
                    _verbose=False):
    # print "LOGX:: Entering `efoscspec1Dredu` method/function in
    # %(__file__)s" % globals()
    import ntt

    try:
        import pyfits
    except:
        from astropy.io import fits as pyfits

    import re
    import string
    import sys
    import os
    import numpy as np

    os.environ["PYRAF_BETA_STATUS"] = "1"
    _extinctdir = 'direc$standard/extinction/'
    _extinction = 'lasilla2.txt'
    _observatory = 'lasilla'
    import datetime

    now = datetime.datetime.now()
    datenow = now.strftime('20%y%m%d%H%M')
    MJDtoday = 55927 + (datetime.date.today() -
                        datetime.date(2012, 1, 1)).days
    dv = ntt.dvex()
    scal = np.pi / 180.
    _gain = ntt.util.readkey3(ntt.util.readhdr(re.sub('\n', '', files[0])),
                              'gain')
    _rdnoise = ntt.util.readkey3(ntt.util.readhdr(re.sub('\n', '', files[0])),
                                 'ron')
    std, rastd, decstd, magstd = ntt.util.readstandard(
        'standard_efosc_mab.txt')
    objectlist = {}
    for img in files:
        hdr = ntt.util.readhdr(img)
        img = re.sub('\n', '', img)
        ntt.util.correctcard(img)
        _ra = ntt.util.readkey3(hdr, 'RA')
        _dec = ntt.util.readkey3(hdr, 'DEC')
        _object = ntt.util.readkey3(hdr, 'object')
        _grism = ntt.util.readkey3(hdr, 'grism')
        _filter = ntt.util.readkey3(hdr, 'filter')
        _slit = ntt.util.readkey3(hdr, 'slit')
        dd = np.arccos(
            np.sin(_dec * scal) * np.sin(decstd * scal) +
            np.cos(_dec * scal) * np.cos(decstd * scal) * np.cos(
                (_ra - rastd) * scal)) * ((180 / np.pi) * 3600)
        if min(dd) < 100:
            _type = 'stdsens'
            ntt.util.updateheader(img, 0,
                                  {'stdname': [std[np.argmin(dd)], '']})
            ntt.util.updateheader(
                img, 0, {'magstd': [float(magstd[np.argmin(dd)]), '']})
        else:
            _type = 'obj'

        if _type not in objectlist:
            objectlist[_type] = {}
        if (_grism, _filter, _slit) not in objectlist[_type]:
            objectlist[_type][_grism, _filter, _slit] = [img]
        else:
            objectlist[_type][_grism, _filter, _slit].append(img)

    from pyraf import iraf
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.specred(_doprint=0)
    iraf.imutil(_doprint=0)
    toforget = ['imutil.imcopy', 'specred.sarith', 'specred.standard']
    for t in toforget:
        iraf.unlearn(t)
    iraf.specred.verbose = 'no'
    iraf.specred.dispaxi = 2
    iraf.set(direc=ntt.__path__[0] + '/')
    sens = {}
    print objectlist
    outputfile = []
    if 'obj' in objectlist.keys():
        tpe = 'obj'
    elif 'stdsens' in objectlist.keys():
        tpe = 'stdsens'
    else:
        sys.exit('error: no objects and no standards in the list')

    for setup in objectlist[tpe]:
        extracted = []
        listatmo = []
        if setup not in sens:
            sens[setup] = []
        if tpe == 'obj':
            print '\n### setup= ', setup, '\n### objects= ', objectlist['obj'][
                setup], '\n'
            for img in objectlist['obj'][setup]:
                #              hdr=readhdr(img)
                print '\n\n### next object= ', img, ' ', ntt.util.readkey3(
                    ntt.util.readhdr(img), 'object'), '\n'
                if os.path.isfile(re.sub('.fits', '_ex.fits', img)):
                    if ntt.util.readkey3(
                            ntt.util.readhdr(re.sub('.fits', '_ex.fits', img)),
                            'quality') == 'Rapid':
                        ntt.util.delete(re.sub('.fits', '_ex.fits', img))
                imgex = ntt.util.extractspectrum(img,
                                                 dv,
                                                 _ext_trace,
                                                 _dispersionline,
                                                 _interactive,
                                                 'obj',
                                                 automaticex=_automaticex)
                if not os.path.isfile(imgex):
                    sys.exit('### error, extraction not computed')
                if not ntt.util.readkey3(ntt.util.readhdr(imgex), 'shift') and \
                        ntt.util.readkey3(ntt.util.readhdr(imgex), 'shift') != 0.0:
                    # if not readkey3(readhdr(imgex),'shift'):
                    ntt.efoscspec1Ddef.checkwavestd(imgex, _interactive)
                extracted.append(imgex)
                if imgex not in outputfile:
                    outputfile.append(imgex)
                ntt.util.updateheader(
                    imgex, 0, {'FILETYPE': [22107, 'extracted 1D spectrum ']})
                ntt.util.updateheader(
                    imgex, 0, {
                        'PRODCATG': [
                            'SCIENCE.' + ntt.util.readkey3(
                                ntt.util.readhdr(imgex), 'tech').upper(),
                            'Data product category'
                        ]
                    })
                ntt.util.updateheader(imgex, 0,
                                      {'TRACE1': [img, 'Originating file']})
                if os.path.isfile('database/ap' +
                                  re.sub('_ex.fits', '', imgex)):
                    if 'database/ap' + re.sub('_ex.fits', '',
                                              imgex) not in outputfile:
                        outputfile.append('database/ap' +
                                          re.sub('_ex.fits', '', imgex))
            print '\n### all objects with this setup extracted\n'
        if liststandard:
            standardlist = liststandard
            _type = 'stdfromdreducer'
        else:
            try:
                standardlist = objectlist['stdsens'][setup]
                _type = 'stdsens'
            except:
                standardlist = ''
                _type = ''
        if _type == 'stdfromdreducer' and len(extracted) >= 1:
            _outputsens2 = ntt.util.searchsens(extracted[0], standardlist)[0]
            print '\n### using standard from reducer ' + str(_outputsens2)
        elif _type not in ['stdsens', 'stdfromdreducer'
                           ] and len(extracted) >= 1:
            _outputsens2 = ntt.util.searchsens(extracted[0], '')[0]
            os.system('cp ' + _outputsens2 + ' .')
            _outputsens2 = string.split(_outputsens2, '/')[-1]
            print '\n### no standard in the list, using standard from archive'
        else:
            for simg in standardlist:
                print '\n###  standard for setup ' + \
                      str(setup) + ' = ', simg, ' ', ntt.util.readkey3(
                          ntt.util.readhdr(simg), 'object'), '\n'
                simgex = ntt.util.extractspectrum(simg,
                                                  dv,
                                                  False,
                                                  False,
                                                  _interactive,
                                                  'std',
                                                  automaticex=_automaticex)
                ntt.util.updateheader(
                    simgex, 0, {'FILETYPE': [22107, 'extracted 1D spectrum']})
                ntt.util.updateheader(
                    simgex, 0, {
                        'PRODCATG': [
                            'SCIENCE.' + ntt.util.readkey3(
                                ntt.util.readhdr(simgex), 'tech').upper(),
                            'Data product category'
                        ]
                    })
                ntt.util.updateheader(simgex, 0,
                                      {'TRACE1': [simg, 'Originating file']})
                if not ntt.util.readkey3(ntt.util.readhdr(simgex), 'shift') and \
                        ntt.util.readkey3(ntt.util.readhdr(simgex), 'shift') != 0.0:
                    #                if not readkey3(readhdr(simgex),'shift'):
                    ntt.efoscspec1Ddef.checkwavestd(simgex, _interactive)
                atmofile = ntt.efoscspec1Ddef.telluric_atmo(
                    simgex)  # atmo file2
                ntt.util.updateheader(atmofile, 0,
                                      {'TRACE1': [simgex, 'Originating file']})
                ntt.util.updateheader(
                    atmofile, 0,
                    {'FILETYPE': [21211, 'telluric correction 1D spectrum ']})
                if tpe != 'obj' and atmofile not in outputfile:
                    outputfile.append(atmofile)
                if not listatmo0:
                    listatmo.append(atmofile)
                sens[setup].append(simgex)
                if simgex not in outputfile:
                    outputfile.append(simgex)
                if setup[0] == 'Gr13' and setup[1] == 'Free':
                    if os.path.isfile(re.sub('Free', 'GG495', simg)):
                        print '\n### extract standard frame with blocking filter to correct for second order contamination\n'
                        simg2 = re.sub('Free', 'GG495', simg)
                        simgex2 = ntt.util.extractspectrum(
                            simg2,
                            dv,
                            False,
                            False,
                            _interactive,
                            'std',
                            automaticex=_automaticex)
                        ntt.util.updateheader(
                            simgex2, 0,
                            {'FILETYPE': [22107, 'extracted 1D spectrum']})
                        ntt.util.updateheader(
                            simgex2, 0, {
                                'PRODCATG': [
                                    'SCIENCE.' + ntt.util.readkey3(
                                        ntt.util.readhdr(simgex2),
                                        'tech').upper(),
                                    'Data product category'
                                ]
                            })
                        if not ntt.util.readkey3(ntt.util.readhdr(simgex2), 'shift') and \
                                ntt.util.readkey3(ntt.util.readhdr(simgex2), 'shift') != 0.0:
                            # if not readkey3(readhdr(simgex2),'shift'):
                            ntt.efoscspec1Ddef.checkwavestd(
                                simgex2, _interactive)
                        ntt.util.updateheader(
                            simgex2, 0,
                            {'TRACE1': [simg2, 'Originating file']})
            print '\n### standard available: ', sens[setup]
            if tpe == 'obj':
                if len(sens[setup]) > 1:
                    goon = 'no'
                    while goon != 'yes':
                        stdused = raw_input(
                            '\n### more than one standard for this setup, which one do you want to use ['
                            + sens[setup][0] + '] ?')
                        if not stdused:
                            stdused = sens[setup][0]
                        if os.path.isfile(stdused):
                            goon = 'yes'
                else:
                    stdused = sens[setup][0]
                stdvec = [stdused]
            else:
                stdvec = sens[setup]
            for stdused in stdvec:
                stdusedclean = re.sub('_ex', '_clean', stdused)
                ntt.util.delete(stdusedclean)
                iraf.specred.sarith(input1=stdused,
                                    op='/',
                                    input2=atmofile,
                                    output=stdusedclean,
                                    format='multispec')
                _outputsens2 = ntt.efoscspec1Ddef.sensfunction(
                    stdusedclean, 'spline3', 16, _interactive)
                ntt.util.updateheader(
                    _outputsens2, 0,
                    {'FILETYPE': [21212, 'sensitivity function']})
                ntt.util.updateheader(
                    _outputsens2, 0, {'TRACE1': [stdused, 'Originating file']})

                if setup[0] == 'Gr13' and setup[1] == 'Free':
                    if os.path.isfile(re.sub('Free', 'GG495', stdused)):
                        print '\n### compute sensitivity function of grism 13 with blocking filter ' \
                              'to correct for second order contamination \n'
                        stdused2 = re.sub('Free', 'GG495', stdused)
                        if not ntt.util.readkey3(ntt.util.readhdr(stdused2),
                                                 'STDNAME'):
                            ntt.util.updateheader(
                                stdused2, 0, {
                                    'STDNAME': [
                                        ntt.util.readkey3(
                                            ntt.util.readhdr(stdused),
                                            'STDNAME'), ''
                                    ]
                                })
                        atmofile2 = ntt.efoscspec1Ddef.telluric_atmo(
                            stdused2)  # atmo file2
                        stdusedclean2 = re.sub('_ex', '_clean', stdused2)
                        ntt.util.delete(stdusedclean2)
                        iraf.specred.sarith(input1=stdused2,
                                            op='/',
                                            input2=atmofile2,
                                            output=stdusedclean2,
                                            format='multispec')
                        _outputsens3 = ntt.efoscspec1Ddef.sensfunction(
                            stdusedclean2, 'spline3', 16, _interactive)
                        ntt.util.updateheader(
                            _outputsens3, 0,
                            {'FILETYPE': [21212, 'sensitivity function']})
                        ntt.util.updateheader(
                            _outputsens3, 0,
                            {'TRACE1': [stdused2, 'Originating file']})
                        _outputsens2 = correctsens(_outputsens2, _outputsens3)

                if _outputsens2 not in outputfile:
                    outputfile.append(_outputsens2)
        if _outputsens2 and tpe == 'obj':
            ####################################################
            for img in objectlist['obj'][setup]:  # flux calibrate 2d images
                imgd = fluxcalib2d(img, _outputsens2)
                ntt.util.updateheader(
                    imgd, 0, {
                        'FILETYPE':
                        [22209, '2D wavelength and flux calibrated spectrum ']
                    })
                ntt.util.updateheader(imgd, 0,
                                      {'TRACE1': [img, 'Originating files']})
                iraf.hedit(imgd,
                           'PRODCATG',
                           delete='yes',
                           update='yes',
                           verify='no')
                if imgd not in outputfile:
                    outputfile.append(imgd)
            ####################################################
            #    flux calib in the standard way
            if not listatmo and listatmo0:
                listatmo = listatmo0[:]
            for _imgex in extracted:
                _airmass = ntt.util.readkey3(ntt.util.readhdr(_imgex),
                                             'airmass')
                _exptime = ntt.util.readkey3(ntt.util.readhdr(_imgex),
                                             'exptime')
                _imgf = re.sub('_ex.fits', '_f.fits', _imgex)
                ntt.util.delete(_imgf)
                qqq = iraf.specred.calibrate(input=_imgex,
                                             output=_imgf,
                                             sensiti=_outputsens2,
                                             extinct='yes',
                                             flux='yes',
                                             extinction=_extinctdir +
                                             _extinction,
                                             observatory=_observatory,
                                             airmass=_airmass,
                                             ignorea='yes',
                                             exptime=_exptime,
                                             fnu='no')
                hedvec = {
                    'SENSFUN': [_outputsens2, ''],
                    'FILETYPE':
                    [22208, '1D wavelength and flux calibrated spectrum'],
                    #                     'SNR':[ntt.util.StoN(_imgf,50),'Average signal to noise ratio per pixel'],
                    'SNR': [
                        ntt.util.StoN2(_imgf, False),
                        'Average signal to noise ratio per pixel'
                    ],
                    'BUNIT':
                    ['erg/cm2/s/Angstrom', 'Physical unit of array values'],
                    'TRACE1': [_imgex, 'Originating file'],
                    'ASSON1': [
                        re.sub('_f.fits', '_2df.fits', _imgf),
                        'Name of associated file'
                    ],
                    'ASSOC1':
                    ['ANCILLARY.2DSPECTRUM', 'Category of associated file']
                }
                ntt.util.updateheader(_imgf, 0, hedvec)
                if _imgf not in outputfile:
                    outputfile.append(_imgf)
                if listatmo:
                    atmofile = ntt.util.searcharc(_imgex, listatmo)[0]
                    if atmofile:
                        _imge = re.sub('_f.fits', '_e.fits', _imgf)
                        ntt.util.delete(_imge)
                        iraf.specred.sarith(input1=_imgf,
                                            op='/',
                                            input2=atmofile,
                                            output=_imge,
                                            w1='INDEF',
                                            w2='INDEF',
                                            format='multispec')
                        try:
                            iraf.imutil.imcopy(input=_imgf + '[*,1,2]',
                                               output=_imge + '[*,1,2]',
                                               verbose='no')
                        except:
                            pass
                        try:
                            iraf.imutil.imcopy(input=_imgf + '[*,1,3]',
                                               output=_imge + '[*,1,3]',
                                               verbose='no')
                        except:
                            pass
                        try:
                            iraf.imutil.imcopy(input=_imgf + '[*,1,4]',
                                               output=_imge + '[*,1,4]',
                                               verbose='no')
                        except:
                            pass
                        if _imge not in outputfile:
                            outputfile.append(_imge)
                        ntt.util.updateheader(
                            _imge, 0, {
                                'FILETYPE': [
                                    22210,
                                    '1D, wave, flux calib, telluric corr.'
                                ]
                            })
                        if atmofile not in outputfile:
                            outputfile.append(atmofile)
                        ntt.util.updateheader(_imge, 0,
                                              {'ATMOFILE': [atmofile, '']})
                        ntt.util.updateheader(
                            _imge, 0, {'TRACE1': [_imgf, 'Originating file']})
                        imgin = _imge
                    else:
                        imgin = _imgf
                else:
                    imgin = _imgf
                imgasci = re.sub('.fits', '.asci', imgin)

                ntt.util.delete(imgasci)
                iraf.onedspec(_doprint=0)
                iraf.onedspec.wspectext(imgin + '[*,1,1]',
                                        imgasci,
                                        header='no')
                if imgasci not in outputfile:
                    outputfile.append(imgasci)

    print '\n### adding keywords for phase 3 ....... '
    for img in outputfile:
        if str(img)[-5:] == '.fits':
            try:
                ntt.util.phase3header(img)  # phase 3 definitions
                ntt.util.updateheader(img, 0, {'quality': ['Final', '']})
            except:
                print 'Warning: ' + img + ' is not a fits file'
            try:
                if int(re.sub('\.', '', str(pyfits.__version__))[:2]) <= 30:
                    aa = 'HIERARCH '
                else:
                    aa = ''
            except:
                aa = ''

            imm = pyfits.open(img, mode='update')
            hdr = imm[0].header
            if aa + 'ESO DPR CATG' in hdr:
                hdr.pop(aa + 'ESO DPR CATG')
            if aa + 'ESO DPR TECH' in hdr:
                hdr.pop(aa + 'ESO DPR TECH')
            if aa + 'ESO DPR TYPE' in hdr:
                hdr.pop(aa + 'ESO DPR TYPE')
            imm.flush()
            imm.close()

    print outputfile
    reduceddata = ntt.rangedata(outputfile)
    f = open(
        'logfile_spec1d_' + str(reduceddata) + '_' + str(datenow) +
        '.raw.list', 'w')
    for img in outputfile:
        try:
            f.write(ntt.util.readkey3(ntt.util.readhdr(img), 'arcfile') + '\n')
        except:
            pass
    f.close()
    return outputfile, 'logfile_spec1d_' + str(reduceddata) + '_' + str(
        datenow) + '.raw.list'
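
# Hedged aside, not part of the function above: the pyfits version check for
# the 'HIERARCH ' prefix is only needed for very old pyfits. With a current
# astropy.io.fits the same header cleanup can be written directly (a sketch,
# with a hypothetical file name):
from astropy.io import fits

def drop_dpr_keywords(filename):
    """Remove the ESO DPR classification keywords from the primary header."""
    with fits.open(filename, mode='update') as hdul:
        hdr = hdul[0].header
        for key in ('ESO DPR CATG', 'ESO DPR TECH', 'ESO DPR TYPE'):
            if key in hdr:
                del hdr[key]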
Exemplo n.º 51
0
import os

# Delete some directories/files from previous runs.

os.system("rm -rf login.cl pyraf database uparm")
os.system("mkiraf")

# Now load IRAF

import pyraf.iraf as iraf

# Load the packages we might need.

iraf.noao(_doprint=0)
iraf.onedspec(_doprint=0)
iraf.twodspec(_doprint=0)
iraf.apextract(_doprint=0)
iraf.unlearn(iraf.apall)

# The name of the science file.

filename = 'vega_9.3narrow.fit'
extracted_filename = 'vega_9.3narrow.ms.fits'
calibrated_filename = 'vega_9.3narrow.calib.fits'

# Delete previous results.

os.system("rm "+extracted_filename+" "+calibrated_filename)

# Make sure that the dispersion axis is in the header.
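
# A hedged sketch of one way to do that with astropy (an assumption, not
# necessarily how this script proceeds; DISPAXIS=1 means dispersion along
# rows, 2 along columns):
from astropy.io import fits

with fits.open(filename, mode='update') as hdul:
    if 'DISPAXIS' not in hdul[0].header:
        hdul[0].header['DISPAXIS'] = 1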
Exemplo n.º 52
0
def extract_spectrum(targetdir,
                     trace,
                     arcspec,
                     refspec,
                     t_nsum,
                     t_step,
                     line,
                     ylevel,
                     interactive):

    """
    Extract a 1D spectrum from the combined 2D frame with apall.

    Must be run from within the target directory.

    """

    iraf.noao(_doprint=0)
    iraf.onedspec(_doprint=0)
    iraf.twodspec(_doprint=0)
    iraf.apextract(_doprint=0)

    basedir = '/data/lc585/WHT_20150331/OBS/'
    targetdir = os.path.join(basedir,targetdir,'Reduced')

    print 'Target directory is ' + targetdir
    print 'Extracting spectrum...'


    if os.path.exists( os.path.join(targetdir, 'imcomb.ms.fits') ):
        os.remove( os.path.join( targetdir, 'imcomb.ms.fits') )
        print 'Removing file ' + os.path.join( targetdir, 'imcomb.ms.fits')

    # If the trace cannot be fitted, use the trace from a nearby reference object

    if trace == 'no':

        dest = os.path.join(targetdir,'database')

        if not os.path.exists(dest):
            os.makedirs(dest)

        db = os.path.join(basedir,refspec,'Reduced','database','ap_data_lc585_WHT_20150331_OBS_'+refspec+'_Reduced_imcomb')

        shutil.copy(db,dest)

        iraf.apall.setParam('references',os.path.join(basedir,refspec,'Reduced','imcomb.fit')) # List of aperture reference images

        print 'Using trace from reference spectra ' + refspec

    # Since the combined frame is an average, the read noise should scale down by sqrt(nframes) while the gain stays the same.
    names = []
    for n in os.listdir(targetdir):
        if (n.endswith('.fit')) & (n.startswith('r')):
            names.append(n)

    nframes = float(len(names))

    # apall does not seem to work if given an absolute path as input, so use the bare filename
    iraf.apall.setParam('input','imcomb.fit') # List of input images
    iraf.apall.setParam('output','') # List of output spectra
    iraf.apall.setParam('apertur','') # Apertures
    iraf.apall.setParam('format','multispec') # Extracted spectra format
    iraf.apall.setParam('referen','') # List of aperture reference images
    iraf.apall.setParam('profile','') # List of aperture profile images

    iraf.apall.setParam('interac',interactive) # Run task interactively?
    iraf.apall.setParam('find','no') # Find apertures?
    iraf.apall.setParam('recente','no') # Recenter apertures?
    iraf.apall.setParam('resize','no') # Resize apertures?
    iraf.apall.setParam('edit','yes') # Edit apertures?
    iraf.apall.setParam('trace',trace) # Trace apertures?
    iraf.apall.setParam('fittrac',interactive) # Fit the traced points interactively?
    iraf.apall.setParam('extract','yes') # Extract spectra?
    iraf.apall.setParam('extras','yes') # Extract sky, sigma, etc.?
    iraf.apall.setParam('review',interactive) # Review extractions?

    iraf.apall.setParam('line',line) # Dispersion line
    iraf.apall.setParam('nsum',20) # Number of dispersion lines to sum or median

                                # DEFAULT APERTURE PARAMETERS

    iraf.apall.setParam('lower',-5.) # Lower aperture limit relative to center
    iraf.apall.setParam('upper',5.) # Upper aperture limit relative to center
    iraf.apall.setParam('apidtab','') # Aperture ID table (optional)

                                # DEFAULT BACKGROUND PARAMETERS
    # Background is now a constant at each wavelength
    iraf.apall.setParam('b_funct','chebyshev') # Background function
    iraf.apall.setParam('b_order',1) # Background function order
    iraf.apall.setParam('b_sampl','-10:-6,6:10') # Background sample regions
    iraf.apall.setParam('b_naver',-3) # Background average or median
    iraf.apall.setParam('b_niter',2) # Background rejection iterations
    iraf.apall.setParam('b_low_r',3.) # Background lower rejection sigma
    iraf.apall.setParam('b_high_',3.) # Background upper rejection sigma
    iraf.apall.setParam('b_grow',0.) # Background rejection growing radius

                                # APERTURE CENTERING PARAMETERS

    iraf.apall.setParam('width',5.) # Profile centering width
    iraf.apall.setParam('radius',10.) # Profile centering radius
    iraf.apall.setParam('thresho',0.) # Detection threshold for profile centering

                                # AUTOMATIC FINDING AND ORDERING PARAMETERS

    iraf.apall.setParam('nfind','') # Number of apertures to be found automatically
    iraf.apall.setParam('minsep',5.) # Minimum separation between spectra
    iraf.apall.setParam('maxsep',100000.) # Maximum separation between spectra
    iraf.apall.setParam('order','increasing') # Order of apertures

                                # RECENTERING PARAMETERS

    iraf.apall.setParam('aprecen','') # Apertures for recentering calculation
    iraf.apall.setParam('npeaks','INDEF') # Select brightest peaks
    iraf.apall.setParam('shift','yes') # Use average shift instead of recentering?

                                # RESIZING PARAMETERS

    iraf.apall.setParam('llimit','INDEF') # Lower aperture limit relative to center
    iraf.apall.setParam('ulimit','INDEF') # Upper aperture limit relative to center
    iraf.apall.setParam('ylevel',ylevel) # Fraction of peak or intensity for automatic width
    iraf.apall.setParam('peak','yes') # Is ylevel a fraction of the peak?
    iraf.apall.setParam('bkg','yes') # Subtract background in automatic width?
    iraf.apall.setParam('r_grow',0.) # Grow limits by this factor
    iraf.apall.setParam('avglimi','no') # Average limits over all apertures?

                                # TRACING PARAMETERS

    iraf.apall.setParam('t_nsum',t_nsum) # Number of dispersion lines to sum
    iraf.apall.setParam('t_step', t_step) # Tracing step
    iraf.apall.setParam('t_nlost',3) # Number of consecutive times profile is lost before quitting
    iraf.apall.setParam('t_funct','spline3') # Trace fitting function
    iraf.apall.setParam('t_order',2) # Trace fitting function order
    iraf.apall.setParam('t_sampl','*') # Trace sample regions
    iraf.apall.setParam('t_naver',1) # Trace average or median
    iraf.apall.setParam('t_niter',2) # Trace rejection iterations
    iraf.apall.setParam('t_low_r',3.) # Trace lower rejection sigma
    iraf.apall.setParam('t_high_',3.) # Trace upper rejection sigma
    iraf.apall.setParam('t_grow',0.) # Trace rejection growing radius

                                # EXTRACTION PARAMETERS

    iraf.apall.setParam('backgro','none') # Background to subtract
    iraf.apall.setParam('skybox',1) # Box car smoothing length for sky
    iraf.apall.setParam('weights','variance') # Extraction weights (none|variance)
    iraf.apall.setParam('pfit','fit1d') # Profile fitting type (fit1d|fit2d)
    iraf.apall.setParam('clean','yes') # Detect and replace bad pixels?
    iraf.apall.setParam('saturat',300000.) # Saturation level
    # iraf.apall.setParam('readnoi',17.0)
    iraf.apall.setParam('readnoi',17./np.sqrt(nframes)) # Read out noise sigma (photons)
    iraf.apall.setParam('gain',4.) # Photon gain (photons/data number)
    iraf.apall.setParam('lsigma',4.) # Lower rejection threshold
    iraf.apall.setParam('usigma',4.) # Upper rejection threshold
    iraf.apall.setParam('nsubaps',1) # Number of subapertures per aperture
    iraf.apall.setParam('mode','q') # h = hidden, q = query, l = learn

    iraf.apall()

    # Now extract arc through same aperture for wavelength calibration

    print '\n' '\n' '\n'
    print 'Extracting Arc through same aperture...'

    if os.path.exists( os.path.join(targetdir,'aimcomb.fits')):
        os.remove( os.path.join(targetdir, 'aimcomb.fits') )
        print 'Removing file ' + os.path.join(targetdir, 'aimcomb.fits')


    arcspec = os.path.join(basedir,arcspec)

    iraf.apall.setParam('input', arcspec)
    iraf.apall.setParam('output', 'aimcomb')
    iraf.apall.setParam('references', 'imcomb.fit' )
    iraf.apall.setParam('recenter','no')
    iraf.apall.setParam('trace','no')
    iraf.apall.setParam('background','no')
    iraf.apall.setParam('interactive','no')

    iraf.apall()


    if os.path.exists( os.path.join(targetdir, 'imcomb+bkgd.ms.fits') ):
        os.remove( os.path.join( targetdir, 'imcomb+bkgd.ms.fits') )
        print 'Removing file ' + os.path.join( targetdir, 'imcomb+bkgd.ms.fits')


    iraf.apall.setParam('input','imcomb+bkgd.fit') # List of input images
    iraf.apall.setParam('output','') # List of output spectra
    iraf.apall.setParam('referen','imcomb.fit') # List of aperture reference images

    iraf.apall.setParam('interac','yes') # Run task interactively?
    iraf.apall.setParam('find','yes') # Find apertures?
    iraf.apall.setParam('recenter','no') # Recenter apertures?
    iraf.apall.setParam('resize','no') # Resize apertures?
    iraf.apall.setParam('edit','yes') # Edit apertures?
    iraf.apall.setParam('trace','no') # Trace apertures?
    iraf.apall.setParam('fittrac',interactive) # Fit the traced points interactively?
    iraf.apall.setParam('extract','yes') # Extract spectra?
    iraf.apall.setParam('extras','yes') # Extract sky, sigma, etc.?
    iraf.apall.setParam('review','yes') # Review extractions?

                                # DEFAULT BACKGROUND PARAMETERS
    # Background is now a constant at each wavelength
    iraf.apall.setParam('b_funct','chebyshev') # Background function
    iraf.apall.setParam('b_order',1) # Background function order
    iraf.apall.setParam('b_sampl','-10:-6,6:10') # Background sample regions
    iraf.apall.setParam('b_naver',-3) # Background average or median
    iraf.apall.setParam('b_niter',2) # Background rejection iterations
    iraf.apall.setParam('b_low_r',3.) # Background lower rejection sigma
    iraf.apall.setParam('b_high_',3.) # Background upper rejection sigma
    iraf.apall.setParam('b_grow',0.) # Background rejection growing radius

                                # EXTRACTION PARAMETERS

    # Previously the read noise was not divided by the square root of the number of frames, but that scaling must hold if the frames are averaged.

    iraf.apall.setParam('backgro','median') # Background to subtract
    iraf.apall.setParam('skybox',1) # Box car smoothing length for sky
    iraf.apall.setParam('weights','variance') # Extraction weights (none|variance)
    iraf.apall.setParam('pfit','fit1d') # Profile fitting type (fit1d|fit2d)
    iraf.apall.setParam('clean','yes') # Detect and replace bad pixels?
    iraf.apall.setParam('saturat',300000.) # Saturation level
    # iraf.apall.setParam('readnoi',17.0)
    iraf.apall.setParam('readnoi',17.0/np.sqrt(nframes)) # Read out noise sigma (photons)
    iraf.apall.setParam('gain',4.) # Photon gain (photons/data number)
    iraf.apall.setParam('lsigma',4.) # Lower rejection threshold
    iraf.apall.setParam('usigma',4.) # Upper rejection threshold
    iraf.apall.setParam('nsubaps',1) # Number of subapertures per aperture

    iraf.apall()

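    # Copy the error (sigma) band from the background-subtracted extraction
    # (plane index 3 of imcomb+bkgd.ms.fits) into plane index 2 of
    # imcomb.ms.fits, so the original extraction carries the same error array.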
    hdulist = fits.open(os.path.join(targetdir, 'imcomb+bkgd.ms.fits'))
    sigma = hdulist[0].data[3,0,:]
    hdulist.close()

    hdulist = fits.open(os.path.join(targetdir, 'imcomb.ms.fits'), mode='update')
    hdulist[0].data[2,0,:] = sigma
    hdulist.flush()
    hdulist.close()

    return None
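
# Hedged aside, not part of the function above: the readnoi = 17/sqrt(nframes)
# setting assumes the combined frame is an average of nframes exposures, so the
# effective per-pixel read noise drops as sqrt(N). A quick numpy check of that
# scaling (simulated read noise only; 17 e- and 8 frames are made-up values):
import numpy as np

np.random.seed(0)
nframes, read_noise = 8, 17.0
frames = np.random.normal(0.0, read_noise, size=(nframes, 100000))
averaged = frames.mean(axis=0)
# averaged.std() comes out close to read_noise / np.sqrt(nframes) (about 6.0)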