Example #1
def read_hds2d(fitsecf,blazedf,wavlim=None):
    import os
    import numpy as np  # needed for np.searchsorted and np.sqrt below

    fitsecf_combined=fitsecf.replace(".fits","_c.fits")
    blazedf_combined=blazedf.replace(".fits","_c.fits")

    if not os.path.isfile(fitsecf_combined):
        from pyraf import iraf
        iraf.scombine(input=fitsecf,output=fitsecf_combined,combine="sum",group="images")

    wav,data,header=read_hds_ecf(fitsecf_combined)#,wavlim=[5140,5200])
        
    try: 
        if not os.path.isfile(blazedf_combined):
            from pyraf import iraf
            iraf.scombine(input=blazedf,output=blazedf_combined,combine="sum",group="images")
        bwav,bdata,header_blaze=read_hds_ecf(blazedf_combined)#,wavlim=[5140,5200])
        normspec=data/bdata
    except:
        normspec=data ###TEST####
    
    if wavlim:
        ind=np.searchsorted(wav,wavlim)
        return wav[ind[0]:ind[1]], normspec[ind[0]:ind[1]], np.sqrt(data)[ind[0]:ind[1]], header
    else:
        return wav, normspec, np.sqrt(data), header
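The wavelength cut above is just np.searchsorted on a monotonically increasing wavelength array. A self-contained sketch of that slicing step (array values are hypothetical):

import numpy as np

wav = np.linspace(5000.0, 5300.0, 3001)   # monotonically increasing wavelengths
flux = np.ones_like(wav)                  # placeholder spectrum
wavlim = [5140.0, 5200.0]

ind = np.searchsorted(wav, wavlim)        # index of the first pixel >= each limit
wav_cut, flux_cut = wav[ind[0]:ind[1]], flux[ind[0]:ind[1]]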
Example #2
def run_scombine(listin, fn):
    namefil = []
    namelist = open(listin, "r")
    for cols in (raw.strip().split() for raw in namelist):
        namefil.append(cols[0])
    namelist.close()
    obs = []
    count = -1
    for i in range(len(namefil)):
        if namefil[i - 1][0:2] != namefil[i][0:2]:
            obs.append([])
            count += 1
        obs[count].append(namefil[i])
    spec = []
    for i in range(len(obs[0])):
        spec.append([])
        for j in range(len(obs)):
            spec[i].append(obs[j][i])
    iraf.noao()
    iraf.onedspec()
    for i in range(len(spec)):
        temp = open('temp_list', 'w')
        for j in range(len(spec[i])):
            temp.write(spec[i][j] + '\n')
        temp.close()
        if (spec[i][0][-6] == 'L') or (spec[i][0][-6] == 'U'):
            name_index = '0_' + spec[i][0][-6]
        else:
            name_index = spec[i][0][-6] + '_' + spec[i][0][-8]
        if len(fibnum) != 0:
            newname = ob_id[fibnum.index(name_index[0])] + '_' + name_index[-1]
            name_index = newname
        iraf.scombine('@temp_list', name_index, logfile='combine_log')
        fn.append(name_index)
Example #3
def spec_combine(red,
                 blue,
                 cname,
                 matchw1=5400,
                 matchw2=5500,
                 outw1=3500,
                 outw2=10000):
    '''Combines blue and red spectra into a single composite'''

    iraf.scombine("%s,%s" % (red, blue),
                  cname,
                  combine="average",
                  reject="avsigclip",
                  w1=outw1,
                  w2=outw2,
                  dw=INDEF,
                  nw=INDEF,
                  scale="median",
                  zero="none",
                  weight="none",
                  sample="%i:%i" % (matchw1, matchw2))

    #iraf.sarith(red, '/', blue, 'temp2', w1=matchw1, w2=matchw2)
    #iraf.iterstat('temp2[*,1,1]', nsigrej=5, maxiter=3, prin=no, verbose=no)
    #mean=float(iraf.iterstat.mean)
    #iraf.sarith(red, '/', mean, 'temp3')
    #iraf.scopy('temp3', 'red', w1=matchw1, w2=outw2)
    #iraf.scopy(blue, 'blue', w1=outw1, w2=matchw2)
    #iraf.scombine('red,blue', cname, w1=outw1, w2=outw2)

    return
Example #4
def scombine(inp, out, apt = '1'):
    """
    Combine spectra.
    """
    iraf.scombine.input = inp
    iraf.scombine.output = out
    iraf.scombine.apertures = apt
    iraf.scombine(mode='h')
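A hypothetical call to this wrapper; it assumes PyRAF is installed, the noao.onedspec package is loaded, and the multispec input files exist (all file names below are placeholders):

from pyraf import iraf
iraf.noao()
iraf.onedspec()

scombine('obj_b.ms.fits,obj_r.ms.fits', 'obj_comb.ms.fits', apt='1')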
Example #5
def stitch():
    #Find the mean 3100 and 3125 of blue and uv
    uvhdu = pyfits.open('13dh_uv.fits')
    uvlam = fitshdr_to_wave(uvhdu[0].header)
    w = np.logical_and(uvlam > 3050, uvlam < 3100)
    uvmean = np.median(uvhdu[0].data[1,0,w])
    uvhdu.close()
    print("uv mean %e"%uvmean)


    bluehdu = pyfits.open('13dh_blue.fits')
    bluelam = fitshdr_to_wave(bluehdu[0].header)
    w = np.logical_and(bluelam > 3050, bluelam < 3100)
    blueuvmean = np.median(bluehdu[0].data[1,0,w])
    print("blue mean uv %e"%blueuvmean)

    # Find mean of red and blue between 5375 and 5600
    w = np.logical_and(bluelam > 5375, bluelam < 5600)
    blueredmean = bluehdu[0].data[1,0,w].mean()
    bluehdu.close()
    print("blue mean red %e"%blueredmean)

    redhdu = pyfits.open('13dh_red.fits')
    redlam = fitshdr_to_wave(redhdu[0].header)
    w = np.logical_and(redlam > 5375, redlam < 5600)
    redmean = redhdu[0].data[1,0,w].mean()
    redhdu.close()
    print("red mean %e"%redmean)

    # trim uv at 3140
    iraf.unlearn(iraf.scopy)
    iraf.scopy('13dh_uv.fits', '13dh_uv_trim.fits', w1='INDEF', w2=3140, rebin='no')
    
    # trim blue at 3130 and 5600
    iraf.unlearn(iraf.scopy)
    iraf.scopy('13dh_blue.fits', '13dh_blue_trim.fits', w1=3130, w2=5600, rebin='no')
    
    # Trim red at 5375
    iraf.unlearn(iraf.scopy)
    iraf.scopy('13dh_red.fits', '13dh_red_trim.fits', w1=5375, w2='INDEF', rebin='no')

    # Copy the spectra from the second extraction to the first
    for im in ['13dh_uv_trim.fits', '13dh_blue_trim.fits', '13dh_red_trim.fits']:
        hdu = pyfits.open(im, mode='update')
        hdu[0].data[0, 0, :] = hdu[0].data[1, 0, :]
        hdu.flush()
        hdu.close()

    # Write out the scale factors
    lines = ['%f\n' % (blueuvmean / uvmean)**-1,'1.0\n','%f\n' % (blueredmean / redmean)**-1]

    f = open('scales.dat','w')
    f.writelines(lines)
    f.close()
    #Scombine with the correct scalings using average
    iraf.unlearn(iraf.scombine)
    iraf.scombine('13dh_uv_trim, 13dh_blue_trim, 13dh_red_trim', '13dh_hst.fits', scale='@scales.dat')
    return
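The per-segment scale factors written to scales.dat come from flux levels measured in overlap windows. A numpy-only sketch of that measurement (arrays and windows are hypothetical; whether a factor or its inverse is wanted depends on scombine's scaling convention, as handled in the code above):

import numpy as np

def overlap_level(wave, flux, w1, w2):
    """Median flux of a spectrum between wavelengths w1 and w2."""
    m = np.logical_and(wave > w1, wave < w2)
    return np.median(flux[m])

# e.g. ratio of the blue and uv levels over the shared 3050-3100 A window:
# factor = overlap_level(bluelam, blueflux, 3050, 3100) / overlap_level(uvlam, uvflux, 3050, 3100)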
Example #6
def combine_err(spectra_list, weight_file, outputname):
    '''Combine two or more multispec error images using **scombine**.

    The input weight file should be the weights used to combine the actual
    data spectra. Error propagation is performed by

    .. math::
       \delta C = \sqrt{\sum_i(E_i/w_i)^2},

    where :math:`E` is an error spectrum (.me_rf_lin.fits), :math:`w` is the
    weight of the corresponding data spectrum, and dC is the error on the
    combined data spectra.

    Note that internally (in both this function and **scombine**) the weights
    are normalized to a unity sum, which avoids the need to keep track of the
    sum of the squares of the weights.

    When combining images that are not on exactly the same wavelength grid
    **scombine** will interpolate all spectra to have the same wavelengths as
    the first image. A simplifying assumption made by this function is that
    the interpolation is essentially linear. See the documentation for
    :func:`dispcor_err` for more information.

    '''
    print 'Combining:'
    for s in spectra_list:
        print '\t{}'.format(s)
    print 'Into {} using weights in {}'.format(outputname, weight_file)

    #Read in weights
    sq_weight_file = 'tmp_sq{}'.format(weight_file)
    w = np.loadtxt(weight_file)
    sqw = w**2
    sqw /= np.sum(sqw)
    np.savetxt(sq_weight_file, sqw, fmt='%5.4f')

    #Construct list of squared outputs
    sqlist = []
    for spectrum in spectra_list:
        tmpsq = 'tmp_sq{}'.format(spectrum)
        pow_image(spectrum, tmpsq, 2.)
        sqlist.append(tmpsq)

    tmpoutput = 'tmp_sq{}'.format(outputname)
    iraf.scombine(','.join(sqlist),
                  tmpoutput,
                  logfile='scombine_err.log',
                  weight='@{}'.format(sq_weight_file))

    pow_image(tmpoutput, outputname, 0.5)
    print 'Cleaning intermediates'
    os.system('rm tmp_sq*')

    return 0
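A numpy-only restatement of the propagation this function performs, assuming the error spectra already share one wavelength grid (shapes and names are illustrative, not part of the original module):

import numpy as np

def combine_err_numpy(err_spectra, weights):
    """sqrt of the weighted mean of squared errors, with squared weights normalized to unity sum."""
    sqw = np.asarray(weights, dtype=float) ** 2
    sqw /= sqw.sum()
    errs = np.asarray(err_spectra, dtype=float)   # shape (nspec, npix)
    return np.sqrt(np.tensordot(sqw, errs ** 2, axes=1))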
Example #7
def combine_err(spectra_list, weight_file, outputname):
    '''Combine two or more multispec error images using **scombine**.

    The input weight file should be the weights used to combine the actual
    data spectra. Error propagation is performed by

    .. math::
       \delta C = \sqrt{\sum_i(E_i/w_i)^2},

    where :math:`E` is an error spectrum (.me_rf_lin.fits), :math:`w` is the
    weight of the corresponding data spectrum, and dC is the error on the
    combined data spectra.

    Note that internally (in both this function and **scombine**) the weights
    are normalized to a unity sum, which avoids the need to keep track of the
    sum of the squares of the weights.

    When combining images that are not on exactly the same wavelength grid
    **scombine** will interpolate all spectra to have the same wavelengths as
    the first image. A simplifying assumption made by this function is that
    the interpolation is essentially linear. See the documentation for
    :func:`dispcor_err` for more information.

    '''
    print 'Combining:'
    for s in spectra_list: print '\t{}'.format(s)
    print 'Into {} using weights in {}'.format(outputname, weight_file)

    #Read in weights
    sq_weight_file = 'tmp_sq{}'.format(weight_file)
    w = np.loadtxt(weight_file)
    sqw = w**2
    sqw /= np.sum(sqw)
    np.savetxt(sq_weight_file,sqw,fmt='%5.4f')

    #Construct list of squared outputs
    sqlist = []
    for spectrum in spectra_list:
        tmpsq = 'tmp_sq{}'.format(spectrum)
        pow_image(spectrum,tmpsq,2.)
        sqlist.append(tmpsq)

    tmpoutput = 'tmp_sq{}'.format(outputname)
    iraf.scombine(','.join(sqlist),
                  tmpoutput,
                  logfile='scombine_err.log',
                  weight='@{}'.format(sq_weight_file))

    pow_image(tmpoutput, outputname, 0.5)
    print 'Cleaning intermediates'
    os.system('rm tmp_sq*')

    return 0
Example #8
def speccombine(fs, outfile):
    nsteps = 8001
    lamgrid = np.linspace(3000.0, 11000.0, nsteps)

    nfs = len(fs)
    # for each aperture
    # get all of the science images
    specs = np.zeros((nfs, nsteps))
    specerrs = np.zeros((nfs, nsteps))
    for i, f in enumerate(fs):
        hdu = pyfits.open(f)
        lam = fitshdr_to_wave(hdu[0].header.copy())

        # interpolate each spectrum onto a common wavelength scale

        specs[i] = np.interp(lamgrid, lam, hdu[0].data,
                             left=0.0, right=0.0)
        # Also calculate the errors. Right now we assume that the variances
        # interpolate linearly. This is not strictly correct but it should be
        # close. Also we don't include terms in the variance for the
        # uncertainty in the wavelength solution.
        specerrs[i] = 0.1 * specs[i]

    # minimize the chi^2 given free parameters are multiplicative factors
    # We could use linear or quadratic, but for now assume constant
    # Assume 3 chips for now
    p0 = np.ones(nfs / 3)

    results = optimize.minimize(combine_spec_chi2, p0,
                                args=(lamgrid, specs, specerrs),
                                method='Nelder-Mead',
                                options={'maxfev': 1e5, 'maxiter': 1e5, 'ftol':1e-5})

    # write the best fit parameters into the headers of the files
    # Dump the list of spectra into a string that iraf can handle
    iraf_filelist = str(fs).replace('[', '').replace(']', '').replace("'", '') #.replace(',', '[SCI],')
    #iraf_filelist += '[SCI]'

    # write the best fit results into a file
    lines = []

    for p in np.repeat(results['x'], 3):
        lines.append('%f\n' % (1.0 / p))
    f = open('scales.dat', 'w')
    f.writelines(lines)
    f.close()
    # run scombine after multiplying the spectra by the best fit parameters
    if os.path.exists(outfile):
        os.remove(outfile)
    iraf.unlearn(iraf.scombine)
    iraf.scombine(iraf_filelist, outfile, scale='@scales.dat',
                  reject='avsigclip', lthreshold='INDEF', w1=bluecut)
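The objective combine_spec_chi2 is defined elsewhere and not shown in this snippet. Purely as an illustration of the fitting step (not the original implementation), a chi-square of each scaled chip against the scaled mean spectrum could look like:

import numpy as np

def combine_spec_chi2_sketch(p, lamgrid, specs, specerrs):
    # one multiplicative factor per chip triple (hypothetical convention)
    scales = np.repeat(p, 3)[:, np.newaxis]
    scaled = specs * scales
    scaled_err = specerrs * scales
    mean_spec = scaled.mean(axis=0)
    good = scaled_err > 0                       # skip pixels with no coverage
    return np.sum(((scaled - mean_spec)[good] / scaled_err[good]) ** 2)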
Example #9
def combine_normalize_images(WORK_DIR, combine=False):
  
  if combine:
    print '\n + Combine images\n'
    try: 
        os.remove('combined_sum.fits')       
    except: pass 
    iraf.scombine(input=','.join([obj+'.ec.vh' for obj in observations[WORK_DIR]['objects']]), output='combined_sum', group='apertures', combine='sum', reject='none', Stdout="/dev/null")

  # normalize spectra
  # This step will require you to manually normalize all the spectra.
  print '\n + Normalize spectra\n'

  if combine:
    try: 
        os.remove('combined_sum_cone_rv_echellet.fits')       
    except: pass 
    iraf.continuum(input='combined_sum', output='combined_sum_cont', type='fit', replace='no', listonly='no', functio='cheb', order=13, low_rej=2, high_rej=3, naverag=-3, niter=9, interac='no', markrej='no')
  else:  
    for obj in observations[WORK_DIR]['objects']:
      try: 
        os.remove(obj+'_cont.fits')
        os.remove(obj+'_vh_norm.fits')
      except: pass 
      iraf.continuum(input=obj+'.ec.vh', output=obj+'_cont', type='fit', replace='no', listonly='no', functio='cheb', order=13, low_rej=2, high_rej=3, naverag=-3, niter=9, interac='no', markrej='no', ask='yes')
      iraf.sarith(input1=obj+'.ec.vh', op='/', input2=obj+'_cont', output=obj+'_vh_norm', format='multispec', Stdout="/dev/null")

  #combine apertures
  print '\n + Combine apertures\n'

  if combine:
    try: 
        os.remove('combined_sum_data.fits')       
        os.remove('combined_sum_norm.fits')    
        os.remove('combined_final.0001.fits')       
    except: pass
    iraf.scombine(input='combined_sum', output='combined_sum_data', group='all', combine='sum', reject='none', Stdout="/dev/null")
    iraf.scombine(input='combined_sum_cont', output='combined_sum_norm', group='all', combine='sum', reject='none', Stdout="/dev/null")

    iraf.sarith(input1='combined_sum_data', op='/', input2='combined_sum_norm', output='combined_final', format='onedspec', Stdout="/dev/null")
  else:  
    for obj in observations[WORK_DIR]['objects']:
      try: 
        os.remove(obj+'_data1D.fits')
        os.remove(obj+'_cont1D.fits')
        os.remove(obj+'_1D_vh_norm.0001.fits')   
        os.remove(obj+'_1D_vh_norm.fits')        
      except: pass 
      iraf.scombine(input=obj+'.ec.vh', output=obj+'_data1D', group='all', combine='sum', reject='none', Stdout="/dev/null")
      iraf.scombine(input=obj+'_cont', output=obj+'_cont1D', group='all', combine='sum', reject='none', Stdout="/dev/null")

      iraf.sarith(input1=obj+'_data1D', op='/', input2=obj+'_cont1D', output=obj+'_1D_vh_norm', format='onedspec', Stdout="/dev/null")
Example #10
def speccombine(fs, outfile):
    nsteps = 8001
    lamgrid = np.linspace(3000.0, 11000.0, nsteps)

    nfs = len(fs)
    # for each aperture
    # get all of the science images
    specs = np.zeros((nfs, nsteps))
    specerrs = np.zeros((nfs, nsteps))
    for i, f in enumerate(fs):
        hdu = fits.open(f)
        lam = fitshdr_to_wave(hdu[0].header.copy())

        # interpolate each spectrum onto a common wavelength scale

        specs[i] = np.interp(lamgrid, lam, hdu[0].data,
                             left=0.0, right=0.0)
        # Also calculate the errors. Right now we assume that the variances
        # interpolate linearly. This is not strictly correct but it should be
        # close. Also we don't include terms in the variance for the
        # uncertainty in the wavelength solution.
        specerrs[i] = 0.1 * specs[i]

    # minimize the chi^2 given free parameters are multiplicative factors
    # We could use linear or quadratic, but for now assume constant
    # Assume 3 chips for now
    p0 = np.ones(nfs / 3)

    results = optimize.minimize(combine_spec_chi2, p0,
                                args=(lamgrid, specs, specerrs),
                                method='Nelder-Mead',
                                options={'maxfev': 1e5, 'maxiter': 1e5, 'ftol':1e-5})

    # write the best fit parameters into the headers of the files
    # Dump the list of spectra into a string that iraf can handle
    iraf_filelist = str(fs).replace('[', '').replace(']', '').replace("'", '') #.replace(',', '[SCI],')
    #iraf_filelist += '[SCI]'

    # write the best fit results into a file
    lines = []

    for p in np.repeat(results['x'], 3):
        lines.append('%f\n' % (1.0 / p))
    f = open('scales.dat', 'w')
    f.writelines(lines)
    f.close()
    # run scombine after multiplying the spectra by the best fit parameters
    if os.path.exists(outfile):
        os.remove(outfile)
    iraf.unlearn(iraf.scombine)
    iraf.scombine(iraf_filelist, outfile, scale='@scales.dat',
                  reject='avsigclip', lthreshold='INDEF', w1=bluecut)
Example #11
def scombine(fstr, oname, combine='sum'):
    """
    Call iraf command scombine, generate a combined spectrum file.
    If the file already exists, this function will delete the old one.
    fstr : input string, like fstr=abc.fits,abd.fits,abe.fits
    type : string
    oname : output file name
    type : string
    combine : the spectrum combine method; methods include
    'average', 'median', 'sum', default='sum'.
    type : string
    """
    if os.path.isfile(oname):
        print('remove file ' + oname)
        os.remove(oname)
    iraf.scombine(input=fstr,
                  output=oname,
                  noutput='',
                  logfile='STDOUT',
                  apertures='',
                  group='apertures',
                  combine=combine,
                  reject='none',
                  first='No',
                  w1='INDEF',
                  w2='INDEF',
                  dw='INDEF',
                  nw='INDEF',
                  log='No',
                  scale='none',
                  zero='none',
                  weight='none',
                  sample='',
                  lthreshold='INDEF',
                  hthreshold='INDEF',
                  nlow=1,
                  nhigh=1,
                  nkeep=1,
                  mclip='Yes',
                  lsigma=3.0,
                  hsigma=3.0,
                  rdnoise='RDNOISE',
                  gain='GAIN',
                  snoise=0.0,
                  sigscale=0.1,
                  pclip=-0.5,
                  grow=0,
                  blank=1.0)
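Hypothetical usage of the wrapper above, with the input list built in Python (file names are placeholders; the noao.onedspec package is assumed to be loaded):

flist = ['night1.fits', 'night2.fits', 'night3.fits']
scombine(','.join(flist), 'target_sum.fits', combine='sum')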
Example #12
def recombine(imagelist, outputimage):
    """Recombine a group of multispec files by aperture
    
    This uses the scombine task with group set to apertures. Because the
    aperture information is preserved when doing sky subtraction all this does
    is concatenate the individual .ms files into a single file.
    
    Parameters
    ----------
    imagelist : list of str
        List of individual file names of the temporary, single-size .ms files created by skysub

    outputimage : str
        Name of the resulting image that has all apertures from the input list

    Returns
    -------
    None
        The result is a multispec file that is a concatenation of all the files in the input list.

    Notes
    -----
    To work as intended, each file in the input list should contain a unique set of aperture identifications. Any repeating apertures will be averaged together, which probably isn't what you want.

    """
    print 'Combining apertures from:'
    for image in imagelist:
        print '\t' + image

    iraf.scombine(
        ','.join(imagelist),
        outputimage,
        apertures='',
        group='apertures',
        combine='average',  #These are irrelevant b/c
        reject='avsigclip',  # we aren't actually combining anything
        first='yes',  #Very important
        w1='INDEF',
        w2='INDEF',
        dw='INDEF',
        nw='INDEF',
        weight='',
        log='no',
        gain=0.438,
        rdnoise=3.9,
        logfile='spool.txt')

    return
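A hypothetical call, stitching the per-block sky-subtracted multispec files back into one image (file names are placeholders):

recombine(['NGC1234_b1.ms.fits', 'NGC1234_b2.ms.fits', 'NGC1234_b3.ms.fits'],
          'NGC1234.ms.fits')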
Example #13
def run_scombine(listin, fn, setup):
    namefil = []
    namelist = open(listin, "r")
    for cols in (raw.strip().split() for raw in namelist):
        namefil.append(cols[0])
    namelist.close()
    obs = []
    count = -1
    for i in range(len(namefil)):
        if namefil[i - 1][namefil[i - 1].index('/') +
                          1:][0] != namefil[i][namefil[i].index('/') + 1:][0]:
            obs.append([])
            count += 1
        obs[count].append(namefil[i])
    spec = []
    for i in range(len(obs[0])):
        spec.append([])
        for j in range(len(obs)):
            Flag = True
            try:
                val = obs[j][i]
                Flag = True
            except IndexError:
                Flag = False
            if Flag:
                spec[i].append(obs[j][i])
        if len(spec[i]) != len(obs):
            spec.pop(i)
    iraf.noao()
    iraf.onedspec()
    for i in range(len(spec)):
        temp = open(setup + '/temp_list', 'w')
        for j in range(len(spec[i])):
            temp.write(spec[i][j] + '\n')
        temp.close()
        longn = spec[i][0]
        specname = longn[(longn.index('/') +
                          1):][(longn[(longn.index('/') + 1):]).index('/') +
                               1:]
        iraf.scombine('@' + setup + '/temp_list',
                      setup + '/' + specname,
                      logfile=setup + '/combine_log')
        fn.append(specname)
Example #14
def spec_combine(red, blue, cname, matchw1=5400, matchw2=5500, outw1=3500, 
                 outw2=10000):

    '''Combines blue and red spectra into a single composite'''

    iraf.scombine("%s,%s" % (red, blue), cname, combine="average",
                  reject="avsigclip", w1=outw1, w2=outw2, dw=INDEF,
                  nw=INDEF, scale="median", zero="none", weight="none",
                  sample="%i:%i" % (matchw1, matchw2))

    #iraf.sarith(red, '/', blue, 'temp2', w1=matchw1, w2=matchw2)
    #iraf.iterstat('temp2[*,1,1]', nsigrej=5, maxiter=3, prin=no, verbose=no)
    #mean=float(iraf.iterstat.mean)
    #iraf.sarith(red, '/', mean, 'temp3')
    #iraf.scopy('temp3', 'red', w1=matchw1, w2=outw2)
    #iraf.scopy(blue, 'blue', w1=outw1, w2=matchw2)
    #iraf.scombine('red,blue', cname, w1=outw1, w2=outw2)

    return
Example #15
def recombine(imagelist,outputimage):
    """Recombine a group of multispec files by aperture
    
    This uses the scombine task with group set to apertures. Because the
    aperture information is preserved when doing sky subtraction all this does
    is concatenate the individual .ms files into a single file.
    
    Parameters
    ----------
    imagelist : list of str
        List of individual file names of the temporary, single-size .ms files created by skysub

    outputimage : str
        Name of the resulting image that has all apertures from the input list

    Returns
    -------
    None
        The result is a multispec file that is a concatenation of all the files in the input list.

    Notes
    -----
    To work as intended, each file in the input list should contain a unique set of aperture identifications. Any repeating apertures will be averaged together, which probably isn't what you want.

    """
    print 'Combining apertures from:'
    for image in imagelist: print '\t'+image

    iraf.scombine(','.join(imagelist),outputimage,
                  apertures='',
                  group='apertures',
                  combine='average', #These are irrelevant b/c
                  reject='avsigclip',# we aren't actually combining anything
                  first='yes', #Very important
                  w1='INDEF',w2='INDEF',dw='INDEF',nw='INDEF',
                  weight='',
                  log='no',
                  gain=0.438,
                  rdnoise=3.9,
                  logfile='spool.txt')

    return
Example #16
def combinespecs(inputre, scale='exposure', rdnoise='rdnoise', gain='gain'):
    ''' combine two or more spectra that match the input regular expression
    '''

    specfiles = glob.glob(inputre)
    specstring = ', '.join(specfiles)

    print 'The following spectra will be combined: '
    print specfiles

    specout = str(raw_input('Enter output file name: '))

    iraf.scombine.unlearn()
    iraf.scombine.scale = scale
    iraf.scombine.rdnoise = rdnoise
    iraf.scombine.gain = gain

    iraf.scombine(input=specstring, output=specout)

    ask = str(raw_input('Plot output with splot? Y/N: '))
    if (ask == 'y') or (ask == 'Y'):
        iraf.splot.unlearn()
        iraf.splot(specout)
Example #17
def combinespecs(inputre, scale='exposure', rdnoise='rdnoise', gain='gain'):
    ''' combine two or more spectra that match the input regular expression
    '''

    specfiles = glob.glob(inputre)
    specstring = ', '.join(specfiles)

    print 'The following spectra will be combined: '
    print specfiles

    specout = str(raw_input('Enter output file name: '))

    iraf.scombine.unlearn()
    iraf.scombine.scale = scale
    iraf.scombine.rdnoise = rdnoise
    iraf.scombine.gain = gain

    iraf.scombine(input=specstring, output=specout)

    ask = str(raw_input('Plot output with splot? Y/N: '))
    if (ask == 'y') or (ask == 'Y'):
        iraf.splot.unlearn()
        iraf.splot(specout)
Example #18
import glob
# Load third-party modules
from pyraf import iraf

# choose spectra to combine
specre = str(raw_input("Enter regular expression for spectra files: "))
specfiles = glob.glob(specre)
specstring = ', '.join(specfiles)

# choose spectra output name
specout = str(raw_input("Enter output combined spectra name: "))

# unlearn previous settings
iraf.scombine.unlearn()

# setup
iraf.scombine.combine = 'median'
iraf.scombine.reject = 'sigclip'
iraf.scombine.scale = 'exposure'
iraf.scombine.rdnoise = 'rdnoise'
iraf.scombine.gain = 'gain'

# call scombine
iraf.scombine(input=specstring, output=specout)

# visualize combined spectra with splot to check
iraf.splot.unlearn()
iraf.splot(specout)

print '--- DONE ---'
Example #19
 iraf.scombine(
     input = combine_list_flux,\
     output = "spec_" + file_name,\
     noutput = "",\
     logfile = "STDOUT",\
     apertures = "*",\
     group = "all",\
     combine = "average",\
     reject = "minmax",\
     first = 1,\
     w1 = spectrum_w1,\
     w2 = spectrum_w2,\
     dw = "INDEF",\
     nw = "INDEF",\
     log = 0,\
     scale = "median",\
     zero = "none",\
     weight = "median",\
     sample = sample_region,\
     nlow = 1,\
     nhigh = 1,\
     nkeep = 1,\
     mclip = 1,\
     lsigma = 2.0,\
     hsigma = 2.0,\
     rdnoise = 0,\
     gain = 1,\
     snoise = 0,\
     sigscale = 0.1,\
     pclip = -0.5,\
     grow = 0,\
     blank = 0.0,\
     mode = "al")
Example #20
def deimos_extract(science, standard, dostandard=yes):

    '''Extract and flux calibrate DEIMOS spectra (assuming they have been
    processed into 2D images using deimos_pipe above).'''

    if dostandard:
        deimos_standard(standard)

    for source in science:

        images = iraffiles("%s_??_B.fits" % source)
        joinstr = ""

        for image in images:

            bimage = image.split(".")[0]; rimage = bimage[:-1] + "R"
            update_head(bimage, "FLUX_OBJ", standard)
            update_head(rimage, "FLUX_OBJ", standard)
            
            # Extract 1D spectra
            if get_head("%s.fits" % bimage, "SKYSUB"):
                iraf.apall(bimage, output="", inter=yes, find=yes, recenter=yes,
                           resize=yes, edit=yes, trace=yes, fittrace=yes, extract=yes,
                           extras=yes, review=no, background="none",
                           reference="%s_01_B" % standard)
                iraf.apall(rimage, output="", inter=yes, find=yes, recenter=yes,
                           resize=yes, edit=yes, trace=yes, fittrace=yes, extract=yes,
                           extras=yes, review=no, background="none",
                           reference="%s_01_R" % standard)
            else:
                iraf.apall(bimage, output="", inter=yes, find=yes, recenter=yes,
                           resize=yes, edit=yes, trace=yes, fittrace=yes, extract=yes,
                           extras=yes, review=no, background="fit",
                           reference="%s_01_B" % standard)
                iraf.apall(rimage, output="", inter=yes, find=yes, recenter=yes,
                           resize=yes, edit=yes, trace=yes, fittrace=yes, extract=yes,
                           extras=yes, review=no, background="fit",
                           reference="%s_01_R" % standard)

            # Normalize by the standard continuua
            iraf.sarith("%s.ms" % bimage, "/", "s%s_01_B.ms.fits" % standard,
                        "s%s.ms" % bimage)
            iraf.sarith("%s.ms" % rimage, "/", "s%s_01_R.ms.fits" % standard,
                        "s%s.ms" % rimage)

            # Telluric correct
            bstd = "%s_01_B" % standard; rstd = "%s_01_R" % standard
            iraf.telluric("s%s.ms" % bimage, "ts%s.ms" % bimage,
                          "telluric.B.%s.fits" % bstd, tweakrms=yes,
                          interactive=yes, sample='6850:6950,7575:7700')
            iraf.telluric("s%s.ms" % rimage, "ts%s.ms" % rimage,
                          "telluric.R.%s.fits" % rstd, tweakrms=yes,
                          interactive=yes, sample='6850:6950,7575:7700')
            
            # Flux calibration
            iraf.calibrate("ts%s.ms" % bimage, "fts%s.ms" % bimage, extinct=yes,
                           flux=yes, extinction="home$extinct/maunakeaextinct.dat",
                           observatory="Keck", sensitivity="%s.B.sens" % standard,
                           airmass='', exptime='')
            iraf.calibrate("ts%s.ms" % rimage, "fts%s.ms" % rimage, extinct=yes,
                           flux=yes, extinction="home$extinct/maunakeaextinct.dat",
                           observatory="Keck", sensitivity="%s.R.sens" % standard,
                           airmass='', exptime='')

            joinstr += "fts%s.ms,fts%s.ms," % (bimage, rimage)
           
        # Combine
        iraf.scombine(joinstr[:-1], "%s.ms.fits" % source, combine="average",
                      reject="avsigclip", w1=INDEF, w2=INDEF, dw=INDEF, nw=INDEF,
                      scale="none", zero="none", weight="none", lsigma=3.0,
                      hsigma=3.0, gain=CCDGAIN, rdnoise=CCDRNOISE)

        # Plot to enable final tweaks
        iraf.splot("%s.ms.fits" % source)

    return
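The joinstr accumulation above (append with a trailing comma, then strip it with joinstr[:-1]) can equivalently be written by collecting the names in a list and joining once. A small sketch of that alternative, with hypothetical image names:

pieces = []
for bimage, rimage in [('obj_01_B', 'obj_01_R'), ('obj_02_B', 'obj_02_R')]:
    pieces.append("fts%s.ms" % bimage)
    pieces.append("fts%s.ms" % rimage)
joinstr = ",".join(pieces)   # same result, no trailing comma to trim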
Example #21
    ## begin IRAFing

    print('\n' + 'Setting airmass...')
    iraf.setairmass('@speclist',
                    observ='lapalma',
                    ra='cat-ra',
                    dec='cat-dec',
                    equi='cat-equi',
                    st='lst',
                    ut='utstart')

    print('\n' + 'Combining spectra...')
    iraf.scombine(input='@speclist',
                  output='allspec',
                  group='all',
                  combine='median',
                  gain=2.45,
                  reject='crreject',
                  lthresh=1e-30,
                  hthresh=hthresh2)

    print('\n' + 'Applying flux calibration to sensitivity function...')
    iraf.calibrate(
        '@speclist',
        'allspeccal',
        obs='lapalma',
        sens=sensfile,
        extinct='no',
        ignoreaps='yes'
    )  ## If extinct='yes', try extinction ='onedstds$ctioextinct.dat'

    print('\n' +
Example #22
def stitch_flats(outputnames, pivots, outstring):
    """Take a list of multispec files and a list of pivots and stitch together a master flat.

    The pivot values are inclusive, so if pivots = [72] then the master flat will contain fibers 1 - 72 from flat #1 and 73 - 109 from flat #2.

    Parameters
    ----------
    
    outputnames : list of str
        Names of the multispec files to stitch together
    
    pivots : list of int
        The fibers that form the borders of the stitch. If outputnames is length N, pivots must be length N - 1
    
    outstring : str
        The special, IRAF scrunch string used to identify intermediate files associated with a **dohydra** run

    Returns
    -------
    
    mastername : str
        The name of the stitched master multispec flat

    Notes
    -----
    The specifics of outstring depend on system and IRAF distribution. See :meth:`GradPak_flatfu.get_scrunch`

    """
    pivots = [0] + pivots + [109]

    tmpfiles = []
    print 'Extracting flat apertures...'
    for i, flat in enumerate(outputnames):
        print '\ttaking {} from {} to {}'.format(flat, pivots[i] + 1,
                                                 pivots[i + 1])
        name = 'tmp{}'.format(flat)
        iraf.scopy(flat,
                   name,
                   apertur='{}-{}'.format(pivots[i] + 1, pivots[i + 1]),
                   w1='INDEF',
                   w2='INDEF',
                   format='multispec',
                   verbose=False)
        tmpfiles.append(name)

    mastername = 'dFlat_master{}.ms.fits'.format(outstring)
    print 'Stitching together master flat {}'.format(mastername)

    iraf.scombine(','.join(tmpfiles),
                  mastername,
                  apertur='',
                  group='apertures',
                  first=True,
                  w1='INDEF',
                  w2='INDEF',
                  dw='INDEF',
                  nw='INDEF',
                  log=False,
                  scale='none',
                  zero='none',
                  weight='none',
                  logfile='flatfu.log')

    for tmp in tmpfiles:
        os.remove(tmp)
    return mastername
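The pivot bookkeeping above turns inclusive fiber pivots into aperture range strings; a pure-Python sketch of just that step:

def aperture_ranges(pivots, nfibers=109):
    """aperture_ranges([72]) -> ['1-72', '73-109']"""
    edges = [0] + list(pivots) + [nfibers]
    return ['{}-{}'.format(lo + 1, hi) for lo, hi in zip(edges[:-1], edges[1:])]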
Example #23
print smooth_files_list
iraf.scombine(
    input = smooth_files_list,\
    output = "master_smooth.fits",\
    apertures = "",\
    group = "all",\
    combine = "average",\
    reject = "none",\
    first = 1,\
    w1 = wave1,\
    w2 = wave2,\
    dw = "INDEF",\
    nw = "INDEF",\
    log = 0,\
    scale = "median",\
    zero = "none",\
    weight = "median",\
    sample = "",\
    lthreshold="INDEF",\
    hthreshold="INDEF",\
    nlow = 1,\
    nhigh = 1,\
    nkeep = 1,\
    mclip = 0,\
    lsigma = 0,\
    hsigma = 0,\
    rdnoise = 0,\
    gain = 1,\
    snoise = 0,\
    pclip = -0.5)
Example #24
# Order 92 of the new grating was the only one that gave issues. CA H or K.
#if grating == 'new':
#    iraf.continuum(input='@targets_extracted',
#                   output='fitcont//@targets_extracted',lines='92',type='fit',
#                   replace=no,wavescale=yes,logscale=no,listonly=no,
#                   interactive=no,sample='3860:3920,3945:3960',naverage=-3,
#                   function='spline3',order=5,niterate=10,low_reject=1.5,
#                   high_reject=5.0,markrej=no,grow=0,override=yes)
print 'Combining spectra...'
iraf.scombine('@targets_extracted',
              'actual_flux//@targets_extracted',
              group='images',
              combine='average',
              reject='none',
              first=no,
              w1=INDEF,
              w2=INDEF,
              dw=INDEF,
              nw=INDEF,
              log=no,
              scale='none',
              weight='none',
              zero='none')

print 'Combining spectra...'
iraf.scombine('@targets_extracted',
              'sum//@targets_extracted',
              group='images',
              combine='sum',
              reject='none',
              first=no,
              w1=INDEF,
Example #25
def salt_scombine(objlist):

    obj = np.loadtxt(objlist, dtype="str", unpack=True, ndmin=1)
    objlist1d = objlist + '1d'
    objj = os.path.splitext(obj[0])[0]
    if os.path.isfile(objj + 'wsf.fits'):
        os.system('sed s/.fits/wsf1d.fits/ ' + objlist + ' > ' + objlist1d)
        objnofits = string.split(obj[0], sep='.')[0] + 'wsf'
        os.system('sed s/.fits/wsf1d_var.fits/ ' + objlist + ' > templist')
    else:
        os.system('sed s/.fits/ws1d.fits/ ' + objlist + ' > ' + objlist1d)
        objnofits = string.split(obj[0], sep='.')[0] + 'ws'
        os.system('sed s/.fits/ws1d_var.fits/ ' + objlist + ' > templist')

    low_ap = float(
        rt.getsh("grep -i 'low\t' database/ap" + objnofits +
                 " |  awk '{print $3}'"))
    high_ap = float(
        rt.getsh("grep -i 'high\t' database/ap" + objnofits +
                 " |  awk '{print $3}'"))

    apradius = abs(low_ap) + abs(high_ap)
    readnoise = 2.45 * np.sqrt(apradius)
    print "ggg = %s" % readnoise

    iraf.scombine.combine = 'average'
    iraf.scombine.rdnoise = readnoise
    iraf.scombine.reject = 'ccdclip'
    iraf.scombine.scale = 'median'

    outnam = ''.join(rt.header(obj[0], 'OBJECT').split()).lower()
    outname = outnam + '_' + objlist
    if os.path.isfile(outname + '.fits'):
        os.system('rm ' + outname + '.fits')

    inname = str('@' + objlist1d)

    iraf.scombine(input=inname, output=outname, first='yes')

    tempfile = 'temp.fits'
    if os.path.isfile(tempfile):
        os.system('rm ' + tempfile)

    iraf.imsum('@templist', tempfile)
    nspec = len(obj)
    outname_var = outname + '_var'
    if os.path.isfile(outname_var + '.fits'):
        os.system('rm ' + outname_var + '.fits')

    nspec = nspec * nspec

    iraf.imarith(tempfile, '/', nspec, outname_var)

    os.system('rm *temp.fits')
    os.system('rm templist')
    iraf.dispcor(outname_var, output='', table=outname)
    outnametxt = outname + '.txt'
    outname_vartxt = outname_var + '.txt'
    iraf.wspectext(outname, outnametxt, header='no')
    iraf.wspectext(outname_var, outname_vartxt, header='no')

    rssdate = rt.header(obj[0], 'DATE-OBS')
    rssdate = string.replace(rssdate, '-', '')
    pastefinal = outname + '_final_' + rssdate + '.txt'
    os.system('paste ' + outnametxt + ' ' + outname_vartxt + '  > ' +
              pastefinal)
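The variance handling above sums the individual variance images and divides by the square of the number of spectra, i.e. it forms the variance of an unweighted mean. A numpy sketch of the same arithmetic (arrays hypothetical):

import numpy as np

def variance_of_mean(var_images):
    var_images = np.asarray(var_images, dtype=float)   # shape (nspec, npix)
    return var_images.sum(axis=0) / var_images.shape[0] ** 2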
Example #26
import glob
# Load third-party modules
from pyraf import iraf


# choose spectra to combine
specre = str(raw_input("Enter regular expression for spectra files: "))
specfiles = glob.glob(specre)
specstring = ", ".join(specfiles)

# choose spectra output name
specout = str(raw_input("Enter output combined spectra name: "))

# unlearn previous settings
iraf.scombine.unlearn()

# setup
iraf.scombine.combine = "median"
iraf.scombine.reject = "sigclip"
iraf.scombine.scale = "exposure"
iraf.scombine.rdnoise = "rdnoise"
iraf.scombine.gain = "gain"

# call scombine
iraf.scombine(input=specstring, output=specout)

# visualize combined spectra with splot to check
iraf.splot.unlearn()
iraf.splot(specout)

print "--- DONE ---"
Example #27
File: SMS.py Project: cinserra/S3
		    BBparams, covar = curve_fit(bbody,wavev,fluxv,p0=(10000,1E-16)) # initial guess
		    T= BBparams[0]
		    Area = BBparams[1]
		    print '\nBlackbody temperature observed spectrum = %.0f +\- %.0f K\n' % (T,np.sqrt(covar[0,0]))
		    bbt = 'BBobs = %.0f +\- %.0f K' % (T,np.sqrt(covar[0,0]))
		    outputname = "bbody_sn_fit.dat" #% T
		    file = open(outputname,"w")
		    file.write("# Blackbody temperature = %.0f +\- %.0f K\n" % (T,np.sqrt(covar[0,0])))
		    w,f = [],[]
		    for wav in range(900,26000):
		        file.write("%g\t%g\n" % (wav,bbody(wav,T,Area)))
		        w.append(wav)
		        f.append(bbody(wav,T,Area))

		    iraf.rspec('bbody_sn_fit.dat','bbody_sn_fit.fits', title='bbodyfit',flux='no',dtype='interp',crval1=900,cdelt1=1)
		    iraf.scombine('bbody_sn_fit.fits,sn.fits,sn.fits,sn.fits', 'bsn_combo.fits',combine='median')

		    iraf.wspec('bsn_combo.fits','bsn_combo.txt',header='no')


		    lcf = open('bsn_combo.txt','r')
		    riga = lcf.readlines()
		    lcf.close()
		    wave,flux= [],[]
		    for line in riga:
		        p = line.split()
		        if float(line.split()[0]) >= fil_obs_min and float(line.split()[0]) <= fil_obs_max: #to match the spectrum wavelengths to those of the filter
		            wave.append(float(p[0]))
		            flux.append(float(p[1]))
		        
		    wavev = array(wave)
Example #28
		    BBparams, covar = curve_fit(bbody,wavev,fluxv,p0=(10000,1E-16)) # initial guess
		    T= BBparams[0]
		    Area = BBparams[1]
		    print '\nBlackbody temperature observed spectrum = %.0f +\- %.0f K\n' % (T,np.sqrt(covar[0,0]))
		    bbt = 'BBobs = %.0f +\- %.0f K' % (T,np.sqrt(covar[0,0]))
		    outputname = "bbody_sn_fit.dat" #% T
		    file = open(outputname,"w")
		    file.write("# Blackbody temperature = %.0f +\- %.0f K\n" % (T,np.sqrt(covar[0,0])))
		    w,f = [],[]
		    for wav in range(900,26000):
		        file.write("%g\t%g\n" % (wav,bbody(wav,T,Area)))
		        w.append(wav)
		        f.append(bbody(wav,T,Area))

		    iraf.rspec('bbody_sn_fit.dat','bbody_sn_fit.fits', title='bbodyfit',flux='no',dtype='interp',crval1=900,cdelt1=1)
		    iraf.scombine('bbody_sn_fit.fits,sn.fits,sn.fits,sn.fits', 'bsn_combo.fits',combine='median')

		    iraf.wspec('bsn_combo.fits','bsn_combo.txt',header='no')

		    print '#######################################'
		    print '\033[4mSince now you are working with an hybrid spectrum+blackbody, if you want additional information, please use the normal version.\033[0m '
		    print '#######################################'

		    lcf = open('bsn_combo.txt','r')
		    riga = lcf.readlines()
		    lcf.close()
		    wave,flux= [],[]
		    for line in riga:
		        p = line.split()
		        if float(line.split()[0]) >= fil_obs_min and float(line.split()[0]) <= fil_obs_max: #to match the spectrum wavelengths to those of the filter
		            wave.append(float(p[0]))
Example #29
print '\n provide the following manually: \n'
print ptfsn2_name, ptfsn2_air, ptfsn2_exp
print '\n'

iraf.onedspec.calibrate(input=ptfsn2ms,
                        out=ptfsn2f,
                        extinction='mk_extinct.txt',
                        observatory='Keck',
                        ignoreaps=yes,
                        sensitivity='sensstar2.fits')

inputname = ptfsn1f + ',' + ptfsn2f
#Produce combined spectrum
iraf.scombine(input=inputname,
              out='finalspectrum.ms',
              reject='avsigclip',
              scale='median',
              sample='5500:6500')

l.write('calib=yes')

iraf.splot(images='finalspectrum.ms.fits')

iraf.wspectext(input='finalspectrum.ms.fits[*,1]',
               output=ptfsn1_name + '.ascii',
               header='NO')
iraf.wspectext(input='ptfsn1.f.fits[*,1]',
               output=ptfsn1_name + '_r400.ascii',
               header='NO')
iraf.wspectext(input='ptfsn2.f.fits[*,1]',
               output=ptfsn1_name + '_b600.ascii',
Example #30
def deimos_extract(science, standard, dostandard=yes):
    '''Extract and flux calibrate DEIMOS spectra (assuming they have been
    processed into 2D images using deimos_pipe above).'''

    if dostandard:
        deimos_standard(standard)

    for source in science:

        images = iraffiles("%s_??_B.fits" % source)
        joinstr = ""

        for image in images:

            bimage = image.split(".")[0]
            rimage = bimage[:-1] + "R"
            update_head(bimage, "FLUX_OBJ", standard)
            update_head(rimage, "FLUX_OBJ", standard)

            # Extract 1D spectra
            if get_head("%s.fits" % bimage, "SKYSUB"):
                iraf.apall(bimage,
                           output="",
                           inter=yes,
                           find=yes,
                           recenter=yes,
                           resize=yes,
                           edit=yes,
                           trace=yes,
                           fittrace=yes,
                           extract=yes,
                           extras=yes,
                           review=no,
                           background="none",
                           reference="%s_01_B" % standard)
                iraf.apall(rimage,
                           output="",
                           inter=yes,
                           find=yes,
                           recenter=yes,
                           resize=yes,
                           edit=yes,
                           trace=yes,
                           fittrace=yes,
                           extract=yes,
                           extras=yes,
                           review=no,
                           background="none",
                           reference="%s_01_R" % standard)
            else:
                iraf.apall(bimage,
                           output="",
                           inter=yes,
                           find=yes,
                           recenter=yes,
                           resize=yes,
                           edit=yes,
                           trace=yes,
                           fittrace=yes,
                           extract=yes,
                           extras=yes,
                           review=no,
                           background="fit",
                           reference="%s_01_B" % standard)
                iraf.apall(rimage,
                           output="",
                           inter=yes,
                           find=yes,
                           recenter=yes,
                           resize=yes,
                           edit=yes,
                           trace=yes,
                           fittrace=yes,
                           extract=yes,
                           extras=yes,
                           review=no,
                           background="fit",
                           reference="%s_01_R" % standard)

            # Normalize by the standard continuua
            iraf.sarith("%s.ms" % bimage, "/", "s%s_01_B.ms.fits" % standard,
                        "s%s.ms" % bimage)
            iraf.sarith("%s.ms" % rimage, "/", "s%s_01_R.ms.fits" % standard,
                        "s%s.ms" % rimage)

            # Telluric correct
            bstd = "%s_01_B" % standard
            rstd = "%s_01_R" % standard
            iraf.telluric("s%s.ms" % bimage,
                          "ts%s.ms" % bimage,
                          "telluric.B.%s.fits" % bstd,
                          tweakrms=yes,
                          interactive=yes,
                          sample='6850:6950,7575:7700')
            iraf.telluric("s%s.ms" % rimage,
                          "ts%s.ms" % rimage,
                          "telluric.R.%s.fits" % rstd,
                          tweakrms=yes,
                          interactive=yes,
                          sample='6850:6950,7575:7700')

            # Flux calibration
            iraf.calibrate("ts%s.ms" % bimage,
                           "fts%s.ms" % bimage,
                           extinct=yes,
                           flux=yes,
                           extinction="home$extinct/maunakeaextinct.dat",
                           observatory="Keck",
                           sensitivity="%s.B.sens" % standard,
                           airmass='',
                           exptime='')
            iraf.calibrate("ts%s.ms" % rimage,
                           "fts%s.ms" % rimage,
                           extinct=yes,
                           flux=yes,
                           extinction="home$extinct/maunakeaextinct.dat",
                           observatory="Keck",
                           sensitivity="%s.R.sens" % standard,
                           airmass='',
                           exptime='')

            joinstr += "fts%s.ms,fts%s.ms," % (bimage, rimage)

        # Combine
        iraf.scombine(joinstr[:-1],
                      "%s.ms.fits" % source,
                      combine="average",
                      reject="avsigclip",
                      w1=INDEF,
                      w2=INDEF,
                      dw=INDEF,
                      nw=INDEF,
                      scale="none",
                      zero="none",
                      weight="none",
                      lsigma=3.0,
                      hsigma=3.0,
                      gain=CCDGAIN,
                      rdnoise=CCDRNOISE)

        # Plot to enable final tweaks
        iraf.splot("%s.ms.fits" % source)

    return
Example #31
def normalize_and_merge(reduced_science_files):
    current_directory = os.getcwd()

    for k in range(len(reduced_science_files)):

        remove_file(current_directory, "norm.dummyI.fits")
        remove_file(current_directory, "norm.dummyIa.fits")
        remove_file(current_directory, "norm.dummyII.fits")
        remove_file(current_directory, "norm.dummyIIa.fits")
        remove_file(current_directory, "norm.dummyIII.fits")
        remove_file(current_directory, "norm.dummyIV.fits")
        remove_file(current_directory, "norm.dummyV.fits")
        remove_file(current_directory, "norm.dummy1.fits")
        remove_file(current_directory, "norm.dummy2.fits")
        remove_file(current_directory, "norm.dummy3.fits")
        remove_file(current_directory, "norm.dummy4.fits")
        remove_file(current_directory, "norm.dummy5.fits")
        remove_file(current_directory, "norm.dummy6.fits")
        remove_file(current_directory, "norm.dummy7.fits")
        remove_file(current_directory, "norm.dummy8.fits")

        ranges = np.loadtxt("ranges.lis", delimiter=",")
        aperturlist = open("test.norm.apertures.lis", "w")

        merge = reduced_science_files[k].replace(".extracted.fits",
                                                 ".norm.fits")

        iraf.scopy(reduced_science_files[k],
                   place_here("norm.dummyI.fits"),
                   apertures="5:14",
                   format="multispec")
        iraf.scopy(reduced_science_files[k],
                   place_here("norm.dummyII.fits"),
                   apertures="35:42",
                   format="multispec")

        iraf.fit1d(place_here("norm.dummyI.fits"),
                   place_here("norm.dummyIa.fits"),
                   naverage=1,
                   axis=2,
                   type="fit",
                   low_rej=1.0,
                   high_rej=2.0,
                   order=2,
                   niterate=2,
                   func="spline3",
                   sample="*")
        iraf.fit1d(place_here("norm.dummyII.fits"),
                   place_here("norm.dummyIIa.fits"),
                   naverage=1,
                   axis=2,
                   type="fit",
                   low_rej=1.0,
                   high_rej=2.0,
                   order=2,
                   niterate=2,
                   func="spline3",
                   sample="*")

        iraf.scopy(reduced_science_files[k],
                   place_here("norm.dummy1.fits"),
                   apertures="1",
                   format="multispec",
                   w1="4544")
        iraf.scopy(reduced_science_files[k],
                   place_here("norm.dummy2.fits"),
                   apertures="2",
                   format="multispec",
                   w1="4569")
        iraf.scopy(reduced_science_files[k],
                   place_here("norm.dummy3.fits"),
                   apertures="3:7",
                   format="multispec")
        iraf.scopy(place_here("norm.dummyIa.fits"),
                   place_here("norm.dummy4.fits"),
                   apertures="8:10",
                   format="multispec")
        iraf.scopy(reduced_science_files[k],
                   place_here("norm.dummy5.fits"),
                   apertures="11:37",
                   format="multispec")
        iraf.scopy(place_here("norm.dummyIIa.fits"),
                   place_here("norm.dummy6.fits"),
                   apertures="38:40",
                   format="multispec")
        iraf.scopy(reduced_science_files[k],
                   place_here("norm.dummy7.fits"),
                   apertures="41:50",
                   format="multispec")
        iraf.scopy(reduced_science_files[k],
                   place_here("norm.dummy8.fits"),
                   apertures="51",
                   format="multispec",
                   w2="7591")

        iraf.scombine("@normalization.list.lis",
                      place_here("norm.dummyIII.fits"),
                      group='apertures')

        iraf.fit1d(place_here("norm.dummyIII.fits"),
                   place_here("norm.dummyIV.fits"),
                   naverage=1,
                   axis=1,
                   type="fit",
                   low_rej=0.8,
                   high_rej=2.0,
                   order=7,
                   niterate=4,
                   func="spline3",
                   sample="*")

        iraf.sarith(reduced_science_files[k], "/",
                    place_here("norm.dummyIV.fits"),
                    place_here("norm.dummyV.fits"))

        iraf.fit1d(place_here("norm.dummyV.fits"),
                   merge,
                   naverage=1,
                   axis=1,
                   type="ratio",
                   low_rej=0.2,
                   high_rej=2.0,
                   order=1,
                   niterate=4,
                   func="chebyshev",
                   sample="*")

        iraf.hedit(merge, "INSTRUME", 'TLS-echelle')

        for i in range(len(ranges)):
            apertures = int(ranges[i][0])

            new_name = merge.replace(".fits", "." + str(apertures) + ".fits")

            input1 = os.path.join(current_directory, merge)
            output = os.path.join(current_directory, new_name)

            iraf.scopy(input1,
                       output,
                       w1=ranges[i][1],
                       w2=ranges[i][2],
                       apertur=apertures,
                       format="multispec")

            aperturlist.write(new_name + "\n")

        aperturlist.close()

        new_name_merged = reduced_science_files[k].replace(
            ".extracted.fits", ".merged.fits")

        iraf.scombine("@test.norm.apertures.lis", new_name_merged, group='all')

        cut_for_ston = new_name_merged.replace("merged", "cut")
        iraf.scopy(new_name_merged,
                   cut_for_ston,
                   w1=5603,
                   w2=5612,
                   format="multispec",
                   apertures="")
        stddev = iraf.imstat(cut_for_ston,
                             Stdout=1,
                             fields="stddev",
                             format="no")
        ston = 1 / float(stddev[0])

        iraf.hedit(new_name_merged, "STON", ston)  # record the S/N in the merged spectrum

        for i in range(len(ranges)):
            apertures = int(ranges[i][0])
            os.remove(
                os.path.join(
                    merge.replace(".fits", "." + str(apertures) + ".fits")))

        os.remove(os.path.join(current_directory, "test.norm.apertures.lis"))

        remove_file(current_directory, "norm.dummyI.fits")
        remove_file(current_directory, "norm.dummyIa.fits")
        remove_file(current_directory, "norm.dummyII.fits")
        remove_file(current_directory, "norm.dummyIIa.fits")
        remove_file(current_directory, "norm.dummyIII.fits")
        remove_file(current_directory, "norm.dummyIV.fits")
        remove_file(current_directory, "norm.dummyV.fits")
        remove_file(current_directory, "norm.dummy1.fits")
        remove_file(current_directory, "norm.dummy2.fits")
        remove_file(current_directory, "norm.dummy3.fits")
        remove_file(current_directory, "norm.dummy4.fits")
        remove_file(current_directory, "norm.dummy5.fits")
        remove_file(current_directory, "norm.dummy6.fits")
        remove_file(current_directory, "norm.dummy7.fits")
        remove_file(current_directory, "norm.dummy8.fits")
    print("Success! Produced files *merged.fits, *norm.fits")
Example #32
def lris_extract(science, standards, dostandard=yes):

    '''Extract and flux calibrate LRIS spectra (assuming they have been
    processed into 2D images using LRIS_pipe above).'''

    if dostandard:
        lris_standard(standards)

    for src in science:

        bimages = iraffiles("%s_??_B.fits" % src)
        rimages = iraffiles("%s_??_R.fits" % src)
        joinstr = ""

        for bimage in bimages:

            bimage = bimage.split(".")[0]
            
            # Find appropriate standard for this image
            bstd = get_head("%s.fits" % bimage, "STDNAME")
            
            # Extract 1D spectra
            if get_head("%s.fits" % bimage, "SKYSUB"):
                iraf.apall(bimage, output="", inter=yes, find=yes, recenter=yes,
                           resize=yes, edit=yes, trace=yes, fittrace=yes, extract=yes,
                           extras=yes, review=no, background="none",
                           reference=bstd)
            else:
                iraf.apall(bimage, output="", inter=yes, find=yes, recenter=yes,
                           resize=yes, edit=yes, trace=yes, fittrace=yes, extract=yes,
                           extras=yes, review=no, background="fit",
                           reference=bstd)

            # Normalize by the standard continuua
            iraf.sarith("%s.ms" % bimage, "/", "../standards/s%s.ms.fits" % bstd,
                        "s%s.ms" % bimage, w1=INDEF, w2=INDEF)            
            
            # Telluric correct
            iraf.telluric("s%s.ms" % bimage, "ts%s.ms" % bimage,
                          "../standards/telluric.%s.fits" % bstd, tweakrms=yes,
                          interactive=yes, sample='6850:6950,7575:7700,8750:9925')
            
            # Flux calibration
            iraf.calibrate("ts%s.ms" % bimage, "fts%s.ms" % bimage, extinct=yes,
                           flux=yes, extinction="home$extinct/maunakeaextinct.dat",
                           observatory="Keck", sens="../standards/%s.sens" % bstd,
                           airmass='', exptime='')

            joinstr += "fts%s.ms," % bimage

        for rimage in rimages:

            rimage = rimage.split(".")[0]
            
            # Find appropriate standard for this image
            rstd = get_head("%s.fits" % rimage, "STDNAME")
            
            # Extract 1D spectra
            if get_head("%s.fits" % rimage, "SKYSUB"):
                iraf.apall(rimage, output="", inter=yes, find=yes, recenter=yes,
                           resize=yes, edit=yes, trace=yes, fittrace=yes, extract=yes,
                           extras=yes, review=no, background="none",
                           reference=rstd)
            else:
                iraf.apall(rimage, output="", inter=yes, find=yes, recenter=yes,
                           resize=yes, edit=yes, trace=yes, fittrace=yes, extract=yes,
                           extras=yes, review=no, background="fit",
                           reference=rstd)

            # Normalize by the standard continua
            iraf.sarith("%s.ms" % rimage, "/", "../standards/s%s.ms.fits" % rstd,
                        "s%s.ms" % rimage, w1=5500, w2=INDEF)
            
            # Telluric correct
            iraf.telluric("s%s.ms" % rimage, "ts%s.ms" % rimage,
                          "../standards/telluric.%s.fits" % rstd, tweakrms=yes,
                          interactive=yes, sample='6850:6950,7575:7700')
            
            # Flux calibration
            iraf.calibrate("ts%s.ms" % rimage, "fts%s.ms" % rimage, extinct=yes,
                           flux=yes, extinction="home$extinct/maunakeaextinct.dat",
                           observatory="Keck", sens="../standards/%s.sens" % rstd,
                           airmass='', exptime='')

            joinstr += "fts%s.ms," % rimage
           
        # Combine
        iraf.scombine(joinstr[:-1], "%s.ms.fits" % src, combine="average",
                      reject="avsigclip", w1=INDEF, w2=INDEF, dw=INDEF, nw=INDEF,
                      scale="median", zero="none", weight="none", sample="5450:5600",
                      lsigma=3.0, hsigma=3.0, gain=RCCDGAIN, rdnoise=RCCDRNOISE)

        # Plot to enable final tweaks
        iraf.splot("%s.ms.fits" % src)

    return
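
Both the blue and red passes above look up reduction metadata through a `get_head` helper that this excerpt does not define. A minimal sketch, assuming it is a thin wrapper around the primary FITS header (the name and call pattern come from the code above; the body is an assumption):

import pyfits

def get_head(image, keyword):
    # Hypothetical helper: return the value of `keyword` from the
    # primary header of the named FITS file.
    return pyfits.getval(image, keyword)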
Example #33
0
def lris_extract(science, standards, dostandard=yes):
    '''Extract and flux calibrate LRIS spectra (assuming they have been
    processed into 2D images using LRIS_pipe above).'''

    if dostandard:
        lris_standard(standards)

    for src in science:

        bimages = iraffiles("%s_??_B.fits" % src)
        rimages = iraffiles("%s_??_R.fits" % src)
        joinstr = ""

        for bimage in bimages:

            bimage = bimage.split(".")[0]

            # Find appropriate standard for this image
            bstd = get_head("%s.fits" % bimage, "STDNAME")

            # Extract 1D spectra
            if get_head("%s.fits" % bimage, "SKYSUB"):
                iraf.apall(bimage,
                           output="",
                           inter=yes,
                           find=yes,
                           recenter=yes,
                           resize=yes,
                           edit=yes,
                           trace=yes,
                           fittrace=yes,
                           extract=yes,
                           extras=yes,
                           review=no,
                           background="none",
                           reference=bstd)
            else:
                iraf.apall(bimage,
                           output="",
                           inter=yes,
                           find=yes,
                           recenter=yes,
                           resize=yes,
                           edit=yes,
                           trace=yes,
                           fittrace=yes,
                           extract=yes,
                           extras=yes,
                           review=no,
                           background="fit",
                           reference=bstd)

            # Normalize by the standard continua
            iraf.sarith("%s.ms" % bimage,
                        "/",
                        "../standards/s%s.ms.fits" % bstd,
                        "s%s.ms" % bimage,
                        w1=INDEF,
                        w2=INDEF)

            # Telluric correct
            iraf.telluric("s%s.ms" % bimage,
                          "ts%s.ms" % bimage,
                          "../standards/telluric.%s.fits" % bstd,
                          tweakrms=yes,
                          interactive=yes,
                          sample='6850:6950,7575:7700,8750:9925')

            # Flux calibration
            iraf.calibrate("ts%s.ms" % bimage,
                           "fts%s.ms" % bimage,
                           extinct=yes,
                           flux=yes,
                           extinction="home$extinct/maunakeaextinct.dat",
                           observatory="Keck",
                           sens="../standards/%s.sens" % bstd,
                           airmass='',
                           exptime='')

            joinstr += "fts%s.ms," % bimage

        for rimage in rimages:

            rimage = rimage.split(".")[0]

            # Find appropriate standard for this image
            rstd = get_head("%s.fits" % rimage, "STDNAME")

            # Extract 1D spectra
            if get_head("%s.fits" % rimage, "SKYSUB"):
                iraf.apall(rimage,
                           output="",
                           inter=yes,
                           find=yes,
                           recenter=yes,
                           resize=yes,
                           edit=yes,
                           trace=yes,
                           fittrace=yes,
                           extract=yes,
                           extras=yes,
                           review=no,
                           background="none",
                           reference=rstd)
            else:
                iraf.apall(rimage,
                           output="",
                           inter=yes,
                           find=yes,
                           recenter=yes,
                           resize=yes,
                           edit=yes,
                           trace=yes,
                           fittrace=yes,
                           extract=yes,
                           extras=yes,
                           review=no,
                           background="fit",
                           reference=rstd)

            # Normalize by the standard continua
            iraf.sarith("%s.ms" % rimage,
                        "/",
                        "../standards/s%s.ms.fits" % rstd,
                        "s%s.ms" % rimage,
                        w1=5500,
                        w2=INDEF)

            # Telluric correct
            iraf.telluric("s%s.ms" % rimage,
                          "ts%s.ms" % rimage,
                          "../standards/telluric.%s.fits" % rstd,
                          tweakrms=yes,
                          interactive=yes,
                          sample='6850:6950,7575:7700')

            # Flux calibration
            iraf.calibrate("ts%s.ms" % rimage,
                           "fts%s.ms" % rimage,
                           extinct=yes,
                           flux=yes,
                           extinction="home$extinct/maunakeaextinct.dat",
                           observatory="Keck",
                           sens="../standards/%s.sens" % rstd,
                           airmass='',
                           exptime='')

            joinstr += "fts%s.ms," % rimage

        # Combine
        iraf.scombine(joinstr[:-1],
                      "%s.ms.fits" % src,
                      combine="average",
                      reject="avsigclip",
                      w1=INDEF,
                      w2=INDEF,
                      dw=INDEF,
                      nw=INDEF,
                      scale="median",
                      zero="none",
                      weight="none",
                      sample="5450:5600",
                      lsigma=3.0,
                      hsigma=3.0,
                      gain=RCCDGAIN,
                      rdnoise=RCCDRNOISE)

        # Plot to enable final tweaks
        iraf.splot("%s.ms.fits" % src)

    return
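
A hypothetical invocation, assuming the `<object>_NN_B/R.fits` naming convention expected above and the same `yes`/`no` PyRAF booleans used in the script (the object and standard names below are made up for illustration):

# Hypothetical usage: extract two science targets after reducing one standard.
lris_extract(["sn2023xyz", "grb230101a"], ["bd284211"], dostandard=yes)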
Example #34
0
def speccombine(fs=None):
    iraf.cd('work')
    if fs is None:
        fs = glob('trm/sci*c?.fits')
    if len(fs)==0:
        print("No flux calibrated images to combine.")
        iraf.cd('..')
        return
    #diagnostic()
    nsteps = 8001
    lamgrid = np.linspace(2000.0, 10000.0, nsteps)

    nfs = len(fs)
    # for each aperture
    # get all of the science images
    specs = np.zeros((nfs, nsteps))
    specerrs = np.zeros((nfs, nsteps))
    ap = 0
    for i, f in enumerate(fs):
        hdu = pyfits.open(f)
        w = WCS(f)
        # get the wavelengths of the pixels
        npix = hdu[0].data.shape[2]
        lam = w.all_pix2world(np.linspace(0, npix - 1, npix), 0, 0, 0)[0]
        # interpolate each spectrum onto a common wavelength scale

        specs[i] = interp(lamgrid, lam, hdu[0].data[0][ap],
                          left=0.0, right=0.0)
        # Also calculate the errors. Right now we assume that the variances
        # interpolate linearly. This is not strictly correct but it should be
        # close. Also we don't include terms in the variance for the
        # uncertainty in the wavelength solution.
        specerrs[i] = interp(lamgrid, lam, hdu[0].data[3][ap] ** 2.0) ** 0.5
    # minimize the chi^2 given free parameters are multiplicative factors
    # We could use linear or quadratic, but for now assume constant
    p0 = np.ones(nfs)

    results = optimize.minimize(combine_spec_chi2, p0,
                                args=(lamgrid, specs, specerrs),
                                method='Nelder-Mead',
                                options={'maxfev': 1e5, 'maxiter': 1e5})

    # write the best fit parameters into the headers of the files
    # Dump the list of spectra into a string that iraf can handle
    iraf_filelist = str(fs).replace('[', '').replace(']', '').replace("'", '')

    # write the best fit results into a file
    lines = []
    for p in results['x']:
        lines.append('%f\n' % (1.0 / p))
    f = open('flx/scales.dat', 'w')
    f.writelines(lines)
    f.close()
    # run scombine after multiplying the spectra by the best fit parameters
    combfile = 'sci_com.fits'
    if os.path.exists(combfile):
        os.remove(combfile)
    iraf.scombine(iraf_filelist, combfile, scale='@flx/scales.dat',
                  reject='avsigclip', lthreshold=-2e-16)

    # Remove the other apertures [TBD]
    # remove the sky and arc bands from the combined spectra. (or add back?? TBD)

    # remove some header keywords that don't make sense in the combined file
    delkws = ['GR-ANGLE','FILTER','BANDID2','BANDID3','BANDID4']
    for kw in delkws:
        pyfits.delval(combfile,kw)

    # combine JD (average), AIRMASS (average), EXPTIME (sum)
    #   we assume there is a c1.fits file for each image
    c1fs = [f for f in fs if 'c1.fits' in f]
    avgjd = np.mean([pyfits.getval(f,'JD') for f in c1fs])
    pyfits.setval(combfile,'JD',value=avgjd, comment='average of multiple exposures')
    print "average JD = " + str(avgjd)
    sumet = np.sum([pyfits.getval(f,'EXPTIME') for f in c1fs])
    pyfits.setval(combfile,'EXPTIME',value=sumet,comment='sum of multiple exposures')
    print "total EXPTIME = " + str(sumet)
    avgam = np.mean([pyfits.getval(f,'AIRMASS') for f in c1fs])
    pyfits.setval(combfile,'AIRMASS',value=avgam,comment='average of multiple exposures')
    print "avg AIRMASS = " + str(avgam)

    # update this to use the average JD midpoint of all exposures?
    print "barycentric velocity correction (km/s) = ", 
    iraf.bcvcorr(spectra=combfile,keytime='UTC-OBS',keywhen='mid',
                 obslong="339:11:16.8",obslat="-32:22:46.2",obsalt='1798',obsname='saao', 
                 savebcv='yes',savejd='yes',printmode=2)
    pyfits.setval(combfile,'UTMID',comment='added by RVSAO task BCVCORR')
    pyfits.setval(combfile,'GJDN',comment='added by RVSAO task BCVCORR')
    pyfits.setval(combfile,'HJDN',comment='added by RVSAO task BCVCORR')
    pyfits.setval(combfile,'BCV',comment='added by RVSAO task BCVCORR (km/s)')
    pyfits.setval(combfile,'HCV',comment='added by RVSAO task BCVCORR (km/s)')
    iraf.dopcor(input=combfile,output='',redshift=-iraf.bcvcorr.bcv,isvelocity='yes',
                add='no',dispersion='yes',flux='no',verbose='yes')
    pyfits.setval(combfile,'DOPCOR01',comment='barycentric velocity correction applied')


    iraf.cd('..')
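
The scaling fit above minimizes `combine_spec_chi2`, which is not included in this excerpt. A minimal sketch of a consistent objective, assuming the free parameters are per-spectrum multiplicative factors and that pixels outside a spectrum's coverage (interpolated to zero) are ignored (the name and argument order come from the `optimize.minimize` call above; the body is an assumption):

import numpy as np

def combine_spec_chi2(p, lamgrid, specs, specerrs):
    # Hypothetical objective: scale each spectrum by its factor p[i] and
    # measure the error-weighted scatter about the mean scaled spectrum.
    scaled = specs * p[:, np.newaxis]
    scalederrs = specerrs * p[:, np.newaxis]
    # only use wavelengths covered by every input spectrum
    # (assumes the errors are nonzero wherever the data are)
    good = np.all(specs != 0.0, axis=0)
    mean = scaled[:, good].mean(axis=0)
    chi = (scaled[:, good] - mean) / scalederrs[:, good]
    return np.sum(chi ** 2)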
Example #35
0
        for i, obj in enumerate(objs):
            print "File: {0} ({1}/{2})".format(obj, i+1, len(objs))
            lis = [x for x in fits if x.endswith(obj)]
            goodlis = select_specs(lis)
            filenames = ", ".join(goodlis)
            output = os.path.join(outdir, obj)
            if len(goodlis) == 1:
                # single good spectrum: just copy it over and move on
                shutil.copy(goodlis[0], os.path.join(outdir2,
                    goodlis[0].replace("_hcg_", "_").replace("_h62_", "_")))
                continue
            if os.path.exists(output):
                continue
            iraf.scombine(input = filenames, output = output, group = 'all',
                          combine = 'sum', reject="none",
                          weight="none", w1 = 4000., w2=6500.,dw=1.25)
            ax = plt.subplot(111)
            for l in lis:
                w = wavelength_array(l)
                intens = pf.getdata(l)
                c = "k" if l in goodlis else "0.5"
                ax.plot(w, intens, "-", color=c, lw=0.4)
            w = wavelength_array(output)
            intens = pf.getdata(output)
            ax.plot(w, intens, "-r", lw=2)
            ax.set_xlabel("Wavelength (Angstrom)")
            ax.set_ylabel("Counts")
            plt.title(obj.replace("_", "-"))
            plt.pause(0.1)
            plt.show(block=0)
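
The plotting loop uses a `wavelength_array` helper that is not part of this excerpt. A minimal sketch, assuming a linear dispersion stored in the usual CRVAL1/CDELT1/CRPIX1 keywords (the name comes from the calls above; the body is an assumption):

import numpy as np
import pyfits as pf

def wavelength_array(specfile):
    # Hypothetical helper: rebuild the linear wavelength axis of a 1D
    # spectrum from its FITS header.
    hdr = pf.getheader(specfile)
    cdelt = hdr.get("CDELT1", hdr.get("CD1_1", 1.0))
    crpix = hdr.get("CRPIX1", 1.0)
    return hdr["CRVAL1"] + cdelt * (np.arange(hdr["NAXIS1"]) + 1.0 - crpix)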
Example #36
0
                order = order + 1
                print "No order exist"

print "Combining spectra"

os.chdir(outdir+"temp")
os.system("ls *.fits > combine_list")

iraf.scombine(
    input="@combine_list",
    output="combine.fits",
    group="all",
    combine="average",
    reject="sigclip",
    first=1,
    w1="INDEF",
    w2="INDEF",
    dw="INDEF",
    nw="INDEF",
    log=0,
    scale="median",
    zero="none",
    weight="median",
    nlow=5,
    nhigh=5,
    mclip=1,
    lsigma=2.0,
    hsigma=2.0)

os.system("mv combine.fits ../"+object_name+".fits")
Example #37
0
def speccombine(fs=None):
    iraf.cd('work')
    if fs is None:
        fs = glob('flx/sci*c?.fits')
    if len(fs)==0:
        print("No flux calibrated images to combine.")
        iraf.cd('..')
        return
    
    nsteps = 8001
    lamgrid = np.linspace(2000.0, 10000.0, nsteps)

    nfs = len(fs)
    # for each aperture
    # get all of the science images
    specs = np.zeros((nfs, nsteps))
    specerrs = np.zeros((nfs, nsteps))
    ap = 0
    for i, f in enumerate(fs):
        hdu = pyfits.open(f)
        w = WCS(f)
        # get the wavelengths of the pixels
        npix = hdu[0].data.shape[2]
        lam = w.all_pix2world(np.linspace(0, npix - 1, npix), 0, 0, 0)[0]
        # interpolate each spectrum onto a common wavelength scale

        specs[i] = interp(lamgrid, lam, hdu[0].data[0][ap],
                          left=0.0, right=0.0)
        # Also calculate the errors. Right now we assume that the variances
        # interpolate linearly. This is not stricly correct but it should be
        # close. Also we don't include terms in the variance for the
        # uncertainty in the wavelength solution.
        specerrs[i] = interp(lamgrid, lam, hdu[0].data[3][ap] ** 2.0) ** 0.5

    # minimize the chi^2 given free parameters are multiplicative factors
    # We could use linear or quadratic, but for now assume constant
    p0 = np.ones(nfs)

    results = optimize.minimize(combine_spec_chi2, p0,
                                args=(lamgrid, specs, specerrs),
                                method='Nelder-Mead',
                                options={'maxfev': 1e5, 'maxiter': 1e5})

    # write the best fit parameters into the headers of the files
    # Dump the list of spectra into a string that iraf can handle
    iraf_filelist = str(fs).replace('[', '').replace(']', '').replace("'", '')

    # write the best fit results into a file
    lines = []
    for p in results['x']:
        lines.append('%f\n' % (1.0 / p))
    f = open('flx/scales.dat', 'w')
    f.writelines(lines)
    f.close()
    # run scombine after multiplying the spectra by the best fit parameters
    combfile = 'sci_com.fits'
    if os.path.exists(combfile):
        os.remove(combfile)
    iraf.scombine(iraf_filelist, combfile, scale='@flx/scales.dat',
                  reject='avsigclip', lthreshold=1e-19)

    # Remove the other apertures [TBD]
    # remove the sky and arc bands from the combined spectra. (or add back?? TBD)

    # remove some header keywords that don't make sense in the combined file
    delkws = ['GRATING','GR-ANGLE','FILTER','BANDID2','BANDID3','BANDID4']
    for kw in delkws:
        pyfits.delval(combfile,kw)

    # combine JD (average), AIRMASS (average), EXPTIME (sum)
    #   we assume there is a c1.fits file for each image
    c1fs = [f for f in fs if 'c1.fits' in f]
    avgjd = np.mean([pyfits.getval(f,'JD') for f in c1fs])
    pyfits.setval(combfile,'JD',value=avgjd)
    print "average JD = " + str(avgjd)
    sumet = np.sum([pyfits.getval(f,'EXPTIME') for f in c1fs])
    pyfits.setval(combfile,'EXPTIME',value=sumet)
    print "total EXPTIME = " + str(sumet)
    avgam = np.mean([pyfits.getval(f,'AIRMASS') for f in c1fs])
    pyfits.setval(combfile,'AIRMASS',value=avgam)
    print "avg AIRMASS = " + str(avgam)

    iraf.cd('..')
    return specs
Example #38
0
def stitch_flats(outputnames,pivots,outstring):
    """Take a list of multispec files and a list of pivots and stitch together a master flat.

    The pivot values are inclusive, so if pivots = [72] then the master flat will contain fibers 1 - 72 from flat #1 and 73 - 109 from flat #2.

    Parameters
    ----------
    
    outputnames : list of str
        Names of the multispec files to stitch together
    
    pivots : list of int
        The fibers that form the borders of the stitch. If outputnames is length N, pivots must be length N - 1
    
    outstring : str
        The special, IRAF scrunch string used to identify intermediate files associated with a **dohydra** run

    Returns
    -------
    
    mastername : str
        The name of the stitched master multispec flat

    Notes
    -----
    The specifics of outstring depend on system and IRAF distribution. See :meth:`GradPak_flatfu.get_scrunch`

    """
    pivots = [0] + pivots + [109]

    tmpfiles = []
    print 'Extracting flat apertures...'
    for i, flat in enumerate(outputnames):
        print '\ttaking {} from {} to {}'.format(flat,pivots[i]+1,pivots[i+1])
        name = 'tmp{}'.format(flat)
        iraf.scopy(flat,name,
                   apertur='{}-{}'.format(pivots[i]+1,pivots[i+1]),
                   w1='INDEF',
                   w2='INDEF',
                   format='multispec',
                   verbose=False)
        tmpfiles.append(name)


    mastername = 'dFlat_master{}.ms.fits'.format(outstring)
    print 'Stitching together master flat {}'.format(mastername)    
        
    iraf.scombine(','.join(tmpfiles),mastername,
                  apertur='',
                  group='apertures',
                  first=True,
                  w1='INDEF',
                  w2='INDEF',
                  dw='INDEF',
                  nw='INDEF',
                  log=False,
                  scale='none',
                  zero='none',
                  weight='none',
                  logfile='flatfu.log')

    for tmp in tmpfiles:
        os.remove(tmp)
    return mastername
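
A hypothetical call matching the docstring's example: fibers 1-72 from the first flat and 73-109 from the second (the file names and scrunch string below are made up for illustration):

# Hypothetical usage of stitch_flats with a single pivot at fiber 72.
master = stitch_flats(['dFlat1.ms.fits', 'dFlat2.ms.fits'], [72], '_scrunched')
print 'Wrote {}'.format(master)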
Example #39
0
def stitch():
    # Find the median flux of the UV and blue spectra between 3050 and 3100
    uvhdu = pyfits.open('13dh_uv.fits')
    uvlam = fitshdr_to_wave(uvhdu[0].header)
    w = np.logical_and(uvlam > 3050, uvlam < 3100)
    uvmean = np.median(uvhdu[0].data[1, 0, w])
    uvhdu.close()
    print("uv mean %e" % uvmean)

    bluehdu = pyfits.open('13dh_blue.fits')
    bluelam = fitshdr_to_wave(bluehdu[0].header)
    w = np.logical_and(bluelam > 3050, bluelam < 3100)
    blueuvmean = np.median(bluehdu[0].data[1, 0, w])
    print("blue mean uv %e" % blueuvmean)

    # Find mean of red and blue between 5375 and 5600
    w = np.logical_and(bluelam > 5375, bluelam < 5600)
    blueredmean = bluehdu[0].data[1, 0, w].mean()
    bluehdu.close()
    print("blue mean red %e" % blueredmean)

    redhdu = pyfits.open('13dh_red.fits')
    redlam = fitshdr_to_wave(redhdu[0].header)
    w = np.logical_and(redlam > 5375, redlam < 5600)
    redmean = redhdu[0].data[1, 0, w].mean()
    redhdu.close()
    print("red mean %e" % redmean)

    # trim uv at 3140
    iraf.unlearn(iraf.scopy)
    iraf.scopy('13dh_uv.fits',
               '13dh_uv_trim.fits',
               w1='INDEF',
               w2=3140,
               rebin='no')

    # trim blue at 3130 and 5600
    iraf.unlearn(iraf.scopy)
    iraf.scopy('13dh_blue.fits',
               '13dh_blue_trim.fits',
               w1=3130,
               w2=5600,
               rebin='no')

    # Trim red at 5375
    iraf.unlearn(iraf.scopy)
    iraf.scopy('13dh_red.fits',
               '13dh_red_trim.fits',
               w1=5375,
               w2='INDEF',
               rebin='no')

    # Copy the spectra from the second extraction to the first
    for im in [
            '13dh_uv_trim.fits', '13dh_blue_trim.fits', '13dh_red_trim.fits'
    ]:
        hdu = pyfits.open(im, mode='update')
        hdu[0].data[0, 0, :] = hdu[0].data[1, 0, :]
        hdu.flush()
        hdu.close()

    # Write out the scale factors
    lines = [
        '%f\n' % (blueuvmean / uvmean)**-1, '1.0\n',
        '%f\n' % (blueredmean / redmean)**-1
    ]

    f = open('scales.dat', 'w')
    f.writelines(lines)
    f.close()
    #Scombine with the correct scalings using average
    iraf.unlearn(iraf.scombine)
    iraf.scombine('13dh_uv_trim, 13dh_blue_trim, 13dh_red_trim',
                  '13dh_hst.fits',
                  scale='@scales.dat')
    return
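
The wavelength arrays above come from a `fitshdr_to_wave` helper that the excerpt does not define. A minimal sketch, assuming a linear dispersion described by CRVAL1, CRPIX1 and CD1_1/CDELT1 (the name comes from the calls above; the body is an assumption):

import numpy as np

def fitshdr_to_wave(hdr):
    # Hypothetical helper: wavelength of each pixel implied by a linear WCS.
    cd = hdr.get('CD1_1', hdr.get('CDELT1', 1.0))
    crpix = hdr.get('CRPIX1', 1.0)
    return hdr['CRVAL1'] + cd * (np.arange(hdr['NAXIS1']) + 1.0 - crpix)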
Example #40
0
                       (T, np.sqrt(covar[0, 0])))
            w, f = [], []
            for wav in range(900, 26000):
                file.write("%g\t%g\n" % (wav, bbody(wav, T, Area)))
                w.append(wav)
                f.append(bbody(wav, T, Area))

            iraf.rspec('bbody_sn_fit.dat',
                       'bbody_sn_fit.fits',
                       title='bbodyfit',
                       flux='no',
                       dtype='interp',
                       crval1=900,
                       cdelt1=1)
            iraf.scombine('bbody_sn_fit.fits,sn.fits,sn.fits,sn.fits',
                          'bsn_combo.fits',
                          combine='median')

            iraf.wspec('bsn_combo.fits', 'bsn_combo.txt', header='no')

            lcf = open('bsn_combo.txt', 'r')
            riga = lcf.readlines()
            lcf.close()
            wave, flux = [], []
            for line in riga:
                p = line.split()
                # keep only the spectrum wavelengths that fall within the filter range
                if fil_obs_min <= float(p[0]) <= fil_obs_max:
                    wave.append(float(p[0]))
                    flux.append(float(p[1]))
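
Both the data file and the combined spectrum above are built from a `bbody(wav, T, Area)` model that is not shown. A minimal sketch of a consistent blackbody, assuming wavelengths in Angstrom and a Planck curve scaled by a free normalization (the name and argument order come from the calls above; the units and constants are assumptions):

import numpy as np

def bbody(wav, T, Area):
    # Hypothetical model: Planck function B_lambda(T) in cgs units times a
    # free normalization; `wav` is assumed to be in Angstrom.
    h = 6.626e-27   # erg s
    c = 2.998e10    # cm / s
    k = 1.381e-16   # erg / K
    lam = wav * 1e-8  # Angstrom -> cm
    blam = (2.0 * h * c ** 2 / lam ** 5) / (np.exp(h * c / (lam * k * T)) - 1.0)
    return Area * blam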
Example #41
0
def speccombine(fs=None):
    iraf.cd('work')
    if fs is None:
        fs = glob('flx/sci*c?.fits')
    if len(fs)==0:
        print("No flux calibrated images to combine.")
        iraf.cd('..')
        return
    
    nsteps = 8001
    lamgrid = np.linspace(2000.0, 10000.0, nsteps)

    nfs = len(fs)
    # for each aperture
    # get all of the science images
    specs = np.zeros((nfs, nsteps))
    specerrs = np.zeros((nfs, nsteps))
    ap = 0
    for i, f in enumerate(fs):
        hdu = pyfits.open(f)
        w = WCS(f)
        # get the wavelengths of the pixels
        npix = hdu[0].data.shape[2]
        lam = w.all_pix2world(np.linspace(0, npix - 1, npix), 0, 0, 0)[0]
        # interpolate each spectrum onto a common wavelength scale

        specs[i] = interp(lamgrid, lam, hdu[0].data[0][ap],
                          left=0.0, right=0.0)
        # Also calculate the errors. Right now we assume that the variances
        # interpolate linearly. This is not stricly correct but it should be
        # close. Also we don't include terms in the variance for the
        # uncertainty in the wavelength solution.
        specerrs[i] = interp(lamgrid, lam, hdu[0].data[3][ap] ** 2.0) ** 0.5

    # minimize the chi^2 given free parameters are multiplicative factors
    # We could use linear or quadratic, but for now assume constant
    p0 = np.ones(nfs)

    results = optimize.minimize(combine_spec_chi2, p0,
                                args=(lamgrid, specs, specerrs),
                                method='Nelder-Mead',
                                options={'maxfev': 1e5, 'maxiter': 1e5})

    # write the best fit parameters into the headers of the files
    # Dump the list of spectra into a string that iraf can handle
    iraf_filelist = str(fs).replace('[', '').replace(']', '').replace("'", '')

    # write the best fit results into a file
    lines = []
    for p in results['x']:
        lines.append('%f\n' % (1.0 / p))
    f = open('flx/scales.dat', 'w')
    f.writelines(lines)
    f.close()
    # run scombine after multiplying the spectra by the best fit parameters
    combfile = 'sci_com.fits'
    if os.path.exists(combfile):
        os.remove(combfile)
    iraf.scombine(iraf_filelist, combfile, scale='@flx/scales.dat',
                  reject='avsigclip', lthreshold=-1e-17)

    # Remove the other apertures [TBD]
    # remove the sky and arc bands from the combined spectra. (or add back?? TBD)

    # remove some header keywords that don't make sense in the combined file
    delkws = ['GR-ANGLE','FILTER','BANDID2','BANDID3','BANDID4']
    for kw in delkws:
        pyfits.delval(combfile,kw)

    # combine JD (average), AIRMASS (average), EXPTIME (sum)
    #   we assume there is a c1.fits file for each image
    c1fs = [f for f in fs if 'c1.fits' in f]
    avgjd = np.mean([pyfits.getval(f,'JD') for f in c1fs])
    pyfits.setval(combfile,'JD',value=avgjd)
    print "average JD = " + str(avgjd)
    sumet = np.sum([pyfits.getval(f,'EXPTIME') for f in c1fs])
    pyfits.setval(combfile,'EXPTIME',value=sumet)
    print "total EXPTIME = " + str(sumet)
    avgam = np.mean([pyfits.getval(f,'AIRMASS') for f in c1fs])
    pyfits.setval(combfile,'AIRMASS',value=avgam)
    print "avg AIRMASS = " + str(avgam)

    iraf.cd('..')
    return specs
Example #42
0
for file in os.listdir(os.getcwd()):
    if file.startswith('dspec'):
        iraf.calibrate(file, 'f'+file)

#combine spectra
finalcomb = []
for file in os.listdir(os.getcwd()):
    if file.startswith('fds'):
        finalcomb.append(file)


file1 = open('listascombine', 'w')
file1.writelines(["%s\n" % item  for item in finalcomb])
file1.close()

iraf.scombine('@listascombine', 'temp_quick.fits')

iraf.scopy('temp_quick.fits',target+'_quick.fits',w1='4000',w2='9000')

#mv final file in a dedicated folder
shutil.copy(target+'_quick.fits','./output/.')

#remove temp FILES
os.remove('lapalmaextinct.dat')
for file in os.listdir(os.getcwd()):
    if file.endswith('fits'):
        os.remove(file)
    elif file.startswith('lista'):
        os.remove(file)
    elif file.startswith('log'):
        os.remove(file)
Example #43
0
                                airmass=objects_pars[imname][1],
                                exptime=objects_pars[imname][2])
        combname.append(imname + '_calib')

combstr = ','.join(combname)

combname = []
for imname in objects_pars:
    if 'sn' in objects_pars[imname][3]:
        combname.append(imname + '_calib')
combstr = ','.join(combname)
iraf.scombine(input=combstr,
              out='combinedspec_rmlow.ms',
              combine='average',
              reject='minmax',
              scale='none',
              sample='5000:7500',
              nlow=1,
              nhigh=1,
              nkeep=2)
iraf.scombine(input=combstr,
              out='combinedspec.ms',
              combine='average',
              reject='minmax',
              scale='none',
              sample='5000:7500',
              nlow=0,
              nhigh=1,
              nkeep=1)

l.write('calib=yes')