Example #1
def advanceprocess(instrume, obsdate, propcode, median, function, order, rej_lo, rej_hi, niter, interp, sdbhost, sdbname, sdbuser, sdbpass, logfile, verbose):
   """Advance process the rss data"""
   rawpath = instrume+'/raw'
   prodpath = instrume+'/advance'
   os.mkdir(prodpath)
   instrume_name='RSS'
   prefix = 'P'
   img_list=[]
   for img in glob.glob('%s/raw/%s*fits' % (instrume, prefix)):
        struct=pyfits.open(img)
        if struct[0].header['PROPID'].upper().strip() != 'JUNK':
           img_list.append(img)
   images=','.join(img_list)
   obslog = '%s/%s%sOBSLOG.fits' % (prodpath, prefix, obsdate)
   gaindb = iraf.osfn('pysalt$data/%s/%samps.dat' % (instrume, instrume_name))
   xtalkfile = iraf.osfn('pysalt$data/%s/%sxtalk.dat' % (instrume, instrume_name))
   geomfile = iraf.osfn('pysalt$data/%s/%sgeom.dat' % (instrume, instrume_name))

   saltadvance(images, prodpath, obslogfile=obslog, gaindb=gaindb, xtalkfile=xtalkfile,
        geomfile=geomfile, subover=True,trim=True,masbias=None,
        subbias=False, median=False, function='polynomial', order=5,rej_lo=3,
        rej_hi=3, niter=5,interp='linear',  sdbhost=sdbhost, sdbname=sdbname,sdbuser=sdbuser, password=sdbpass,
        clobber=True, logfile=logfile, verbose=verbose)

   
   return
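
Every example in this collection uses iraf.osfn() to translate an IRAF virtual filename (here a pysalt$ path) into a host operating-system path. A minimal sketch of that pattern, assuming PyRAF is installed and the pysalt package has been loaded so that the pysalt$ logical directory is defined; the resolved path shown in the comment is illustrative:

from pyraf import iraf

# resolve the IRAF virtual filename to an ordinary OS path string
gaindb = iraf.osfn('pysalt$data/rss/RSSamps.dat')
print(gaindb)   # e.g. /usr/local/pysalt/data/rss/RSSamps.dat, depending on the installation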
Example #2
def processdata(instrume, obsdate, propcode, median, function, order, rej_lo, rej_hi, niter, interp, logfile, verbose):
   """Clean and process the data"""

   #set up instrument specific naming
   if instrume=='rss':
      instrume_name='RSS'
      prefix='P'
   elif instrume=='scam':
      instrume_name='SALTICAM'
      prefix='S'

   rawpath = instrume+'/raw'
   prodpath = instrume+'/product'
   img_list=[]
   for img in glob.glob('%s/raw/%s*fits' % (instrume, prefix)):
        struct=pyfits.open(img)
        if struct[0].header['PROPID'].upper().strip() != 'JUNK':
           img_list.append(img)
   img_str=','.join(img_list)
   obslog = '%s/%s%sOBSLOG.fits' % (prodpath, prefix, obsdate)
   gaindb = iraf.osfn('pysalt$data/%s/%samps.dat' % (instrume, instrume_name))
   #gaindb = ''
   xtalkfile = iraf.osfn('pysalt$data/%s/%sxtalk.dat' % (instrume, instrume_name))
   geomfile = iraf.osfn('pysalt$data/%s/%sgeom.dat' % (instrume, instrume_name))
   if len(img_list)>0:
        saltclean(images=img_str,outpath=prodpath,obslogfile=obslog,gaindb=gaindb,
                       xtalkfile=xtalkfile,geomfile=geomfile,subover=True,trim=True,
                       median=median,function=function,order=order,rej_lo=rej_lo,
                       rej_hi=rej_hi,niter=niter,masbias=True,subbias=False,interp=interp,
                       clobber=True,logfile=logfile,verbose=verbose)

   rawsize = 0.
   rawnum = 0
   prodsize = 0.
   prodnum = 0
   if len(img_list)>0:
       files = glob.glob('%s/raw/%s*.fits' % (instrume, prefix))
       rawnum = len(files)
       for file in files:
           rawsize += os.stat(file).st_size
       files = glob.glob('%s/product/*%s*.fits' % (instrume, prefix))
       prodnum = len(files)
       for file in files:
           prodsize += os.stat(file).st_size

   # collate RSS data for individual PIs
   outpath = '.'
   if len(img_list):
       saltobsid(propcode=propcode,obslog=obslog,rawpath=rawpath,prodpath=prodpath, outpath=outpath,clobber=True,logfile=logfile,verbose=verbose)
     
   return  rawsize, rawnum, prodsize, prodnum
Example #3
def specpolfilter(filterlist, infilelist):

    obss = len(infilelist)
    obsdict = obslog(infilelist)

    for b in range(obss):
        hdul = pyfits.open(infilelist[b])
        dwav = float(hdul['SCI'].header['CDELT1'])
        wav0 = float(hdul['SCI'].header['CRVAL1'])
        wavs = int(hdul['SCI'].header['NAXIS1'])

        ctypelist = (hdul['SCI'].header['CTYPE3']).split(',')
        stokes_sw = hdul['SCI'].data[:, 0, :]
        var_sw = hdul['VAR'].data[:, 0, :]
        pstokess = len(ctypelist) - 1
        ok_w = (hdul['BPM'].data[:, 0, :] == 0).all(axis=0)
        wav_w = wav0 + dwav * np.arange(wavs)
        print "\n" + infilelist[b]
        print("Filter " + pstokess * "%5s      Err     ") % tuple(
            ctypelist[1:])
        for filter in filterlist:
            if filter in ("U", "B", "V"):
                filterfile = iraf.osfn("pysalt$data/scam/filters/Johnson_" +
                                       filter + ".txt")
            elif filter in ("R", "I"):
                filterfile = iraf.osfn("pysalt$data/scam/filters/Cousins_" +
                                       filter + ".txt")
#            else:
#                (filter file in cwd)
            wav_l, feff_l = np.loadtxt(filterfile, dtype=float, unpack=True)
            feff_l[feff_l < .0001] = 0.
            feff_w = interp1d(wav_l, feff_l, kind='linear',
                              bounds_error=False)(wav_w)

            okf_w = (ok_w & (feff_w > .0003))
            feff_w[~okf_w] = 0.

            if feff_w[okf_w].sum() == 0: continue
            stokesfil_s = (feff_w[okf_w] * stokes_sw[:, okf_w]).sum(
                axis=1) / feff_w[okf_w].sum()
            varfil_s = (feff_w[okf_w]**2 * var_sw[:, okf_w]).sum(
                axis=1) / (feff_w[okf_w].sum()**2)
            nstokesfil_s = 100. * stokesfil_s / stokesfil_s[0]
            nerrfil_s = 100. * np.sqrt(
                varfil_s[:pstokess + 1]) / stokesfil_s[0]
            print ("%4s "+pstokess*"%9.4f %7.4f  ") % \
                (tuple(filter)+tuple(np.vstack((nstokesfil_s[1:],nerrfil_s[1:])).T.ravel()))

    return ()
Example #4
def get_pedsub_pars( camera, pedparname, Trl, pedsub_file, saapername, debug=False ):
    """ Get keyword parameter values for pedsub

    @param camera: camera number
    @type camera: int
    @param pedparname: parameter file name
    @type pedparname: string
    @param Trl: trailer file name
    @type Trl: string
    @param pedsub_file: name of file with pedsub pars
    @type pedsub_file: string
    @param saapername: name of file for SAA persistence image
    @type saapername: string
    @return: kwpars
    @rtype: dict
    """

    # Get params from the pedsubtab
    try:
        kwpars = getkwpars(camera,iraf.osfn(pedparname))
    except Exception as e:
        set_keys_final(_error,_error, pedsub_file, donestring,saapername)
        handle_exception(e, Trl, [], debug = debug)
        return _error

    return kwpars
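
A hedged usage sketch of the helper above; the camera number and all file names are illustrative, and the surrounding module is assumed to provide getkwpars(), set_keys_final(), handle_exception() and the _error sentinel that the function returns on failure:

kwpars = get_pedsub_pars(camera=2,
                         pedparname='nref$pedsubtab.fits',      # illustrative IRAF path
                         Trl='n8xx01010_trl.txt',               # illustrative trailer file
                         pedsub_file='n8xx01010_ped.fits',
                         saapername='n8xx01010_saaper.fits')
print(kwpars)   # a dict of pedsub keyword parameters, or the _error sentinel on failure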
Example #5
def agncalibrate(img, outfile, calfile, specformat='lcogt'):

    
    #set up some files that will be needed
    logfile='specext.log'

    hdu = fits.open(img)
    w1 = hdu[0].header['CRVAL1']
    p1 = hdu[0].header['CRPIX1']
    dw = hdu[0].header['CD1_1']
    f = hdu[0].data[0][0]
    e = hdu[0].data[3][0]
    xarr = np.arange(len(f))
    w = (xarr)*dw+w1


    cal_spectra=st.readspectrum(calfile, error=False, ftype='ascii')
    airmass=hdu[0].header['AIRMASS']
    exptime=hdu[0].header['EXPTIME']
    extfile=iraf.osfn("pysalt$data/site/suth_extinct.dat")
    ext_spectra=st.readspectrum(extfile, error=False, ftype='ascii')

    flux_spec=Spectrum.Spectrum(w, f, e, stype='continuum')
    flux_spec=calfunc(flux_spec, cal_spectra, ext_spectra, airmass, exptime, True)
    hdu[0].data[0][0] = flux_spec.flux
    hdu[0].data[3][0] = flux_spec.var
    hdu.writeto(outfile, clobber=True)
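
A hedged example call, assuming an LCOGT-format spectrum and an ASCII sensitivity calibration file; the file names are purely illustrative:

agncalibrate('agn_spectrum.fits', 'agn_spectrum_cal.fits', 'sens_cal.dat',
             specformat='lcogt')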
Example #6
def galextract(img, yc=None, dy=None, normalize=True, calfile=None, convert=True, specformat='ascii'):

    
    #set up some files that will be needed
    logfile='specext.log'


    #create the spectra text files for all of our objects
    spec_list=[]
    #skynormalize the data
    if normalize:
       specslitnormalize(img, 'n'+img, '', response=None, response_output=None, order=3, conv=1e-2, niter=20,
                     startext=0, clobber=True,logfile='salt.log',verbose=True)

    hdu=pyfits.open('n'+img)
    target=hdu[0].header['OBJECT']
    ofile='%s.%s_%i_%i.ltxt' % (target, extract_date(img), extract_number(img), yc)
    #ofile = img.replace('fits', 'txt')

    extract_spectra(hdu, yc, dy, ofile, smooth=False, grow=10, clobber=True, specformat=specformat, convert=convert)

    if calfile is not None: 
           airmass=hdu[0].header['AIRMASS']
           exptime=hdu[0].header['EXPTIME']
           extfile=iraf.osfn("pysalt$data/site/suth_extinct.dat")
           speccal(ofile, ofile.replace("txt", "spec"), calfile, extfile, airmass, exptime, clobber=True, logfile='salt.log', verbose=True)
Example #7
def preprocessdata(instrume, prefix,  obsdate, keyfile, log, logfile, verbose):
   """Run through all of the processing of the individual data files"""

   log.message('Beginning pre-processing of %s data' % instrume.upper())

   #set up the input path
   inpath=instrume+'/raw/'

   #create the product directory
   prodpath=instrume+'/product/'
   saltio.createdir(prodpath)

   # convert any slot mode binary data to FITS
   convertbin(inpath, iraf.osfn('pysalt$data/%s/%s_fits.config' % (instrume, instrume)), logfile, verbose)


   # fix sec keywords for data of unequal binning obtained before 2006 Aug 12
   if int(obsdate) < 20060812:
       pinfiles = instrume+'/raw/*.fits'
       log.message('Fixing SEC keywords in older data')
       log.message('SALTFIXSEC -- infiles=' + pinfiles)
       pipetools.saltfixsec(infiles=pinfiles)

   #fix the key words for the data set
   recfile = prodpath+prefix+ obsdate + 'KEYLOG.fits'
   img=','.join(glob.glob(inpath+prefix+'*fits'))
   if img:
       salteditkey(images=img,outimages=img,outpref='',keyfile=keyfile,recfile=recfile,
                       clobber=True,logfile=logfile,verbose=verbose)
Example #8
def galextract(img,
               yc=None,
               dy=None,
               normalize=True,
               calfile=None,
               convert=True,
               specformat='ascii'):

    #set up some files that will be needed
    logfile = 'specext.log'

    #create the spectra text files for all of our objects
    spec_list = []
    #skynormalize the data
    if normalize:
        specslitnormalize(img,
                          'n' + img,
                          '',
                          response=None,
                          response_output=None,
                          order=3,
                          conv=1e-2,
                          niter=20,
                          startext=0,
                          clobber=True,
                          logfile='salt.log',
                          verbose=True)

    hdu = pyfits.open('n' + img)
    target = hdu[0].header['OBJECT']
    ofile = '%s.%s_%i_%i.ltxt' % (target, extract_date(img),
                                  extract_number(img), yc)
    #ofile = img.replace('fits', 'txt')

    extract_spectra(hdu,
                    yc,
                    dy,
                    ofile,
                    smooth=False,
                    grow=10,
                    clobber=True,
                    specformat=specformat,
                    convert=convert)

    if calfile is not None:
        airmass = hdu[0].header['AIRMASS']
        exptime = hdu[0].header['EXPTIME']
        extfile = iraf.osfn("pysalt$data/site/suth_extinct.dat")
        speccal(ofile,
                ofile.replace("txt", "spec"),
                calfile,
                extfile,
                airmass,
                exptime,
                clobber=True,
                logfile='salt.log',
                verbose=True)
Example #9
def specpolfilter(filterlist, infilelist):


    obss = len(infilelist)
    obsdict=obslog(infilelist)

    for b in range(obss):
        hdul = pyfits.open(infilelist[b])
        dwav = float(hdul['SCI'].header['CDELT1'])
        wav0 = float(hdul['SCI'].header['CRVAL1'])
        wavs = int(hdul['SCI'].header['NAXIS1'])

        ctypelist = (hdul['SCI'].header['CTYPE3']).split(',')
        stokes_sw = hdul['SCI'].data[:,0,:]
        var_sw = hdul['VAR'].data[:,0,:]
        pstokess = len(ctypelist)-1     
        ok_w = (hdul['BPM'].data[:,0,:] == 0).all(axis=0)
        wav_w = wav0 + dwav*np.arange(wavs)
        print "\n"+infilelist[b]
        print ("Filter "+pstokess*"%5s      Err     ") % tuple(ctypelist[1:])
        for filter in filterlist:
            if filter in ("U","B","V"):
                filterfile = iraf.osfn("pysalt$data/scam/filters/Johnson_"+filter+".txt")
            elif filter in ("R","I"):
                filterfile = iraf.osfn("pysalt$data/scam/filters/Cousins_"+filter+".txt")         
#            else:
#                (filter file in cwd)
            wav_l,feff_l = np.loadtxt(filterfile,dtype=float,unpack=True)
            feff_l[feff_l < .0001] = 0.
            feff_w = interp1d(wav_l,feff_l,kind='linear',bounds_error=False)(wav_w)

            okf_w = (ok_w & (feff_w > .0003))
            feff_w[~okf_w] = 0.

            if feff_w[okf_w].sum() == 0: continue
            stokesfil_s = (feff_w[okf_w]*stokes_sw[:,okf_w]).sum(axis=1)/feff_w[okf_w].sum()
            varfil_s = (feff_w[okf_w]**2*var_sw[:,okf_w]).sum(axis=1)/(feff_w[okf_w].sum()**2)
            nstokesfil_s = 100.*stokesfil_s/stokesfil_s[0]
            nerrfil_s = 100.*np.sqrt(varfil_s[:pstokess+1])/stokesfil_s[0]
            print ("%4s "+pstokess*"%9.4f %7.4f  ") % \
                (tuple(filter)+tuple(np.vstack((nstokesfil_s[1:],nerrfil_s[1:])).T.ravel()))

    return()
Example #10
def init_northsouth(fs, topdir, rawpath):
    # Get the correct directory for the standard star
    base_stddir = 'spec50cal/'
    extfile = iraf.osfn('gmisc$lib/onedstds/kpnoextinct.dat') 
    observatory = 'Gemini-North'

    global is_GS
    is_GS = pyfits.getval(fs[0], 'DETECTOR') == 'GMOS + Hamamatsu'
    if is_GS:
        global dooverscan
        dooverscan = True
        if not os.path.exists(topdir + '/raw_fixhdr'):
            os.mkdir(topdir + '/raw_fixhdr')
        rawpath = '%s/raw_fixhdr/' % topdir
        os.system('gmoss_fix_headers.py --files="%s/raw/*.fits" --destination=%s' % (topdir, rawpath))
        base_stddir = 'ctionewcal/'
        observatory = 'Gemini-South'
        extfile = iraf.osfn('gmisc$lib/onedstds/ctioextinct.dat') 
    return extfile, observatory, base_stddir, rawpath
Example #11
def x1d_iraf(input,
             outdir="",
             update_input=iraf.no,
             find_target=iraf.no,
             cutoff=0.,
             location="",
             extrsize="",
             verbosity=1,
             version=""):

    # Interpret input parameters

    if outdir.strip() == "":
        outdir = None
    if outdir is not None:
        outdir = iraf.osfn(outdir)

    update_input = _toBoolean(update_input)
    if cutoff <= 0.:
        cutoff = None
    find_targ = {"flag": _toBoolean(find_target), "cutoff": cutoff}

    # convert from string to list
    location = commaSep(location, "float")
    extrsize = commaSep(extrsize, "int")

    inlist = input.split(",")
    more_input = []
    for filename in inlist:
        more_input.extend(filename.split())
    all_input = []
    for filename in more_input:
        filename = iraf.osfn(filename)
        all_input.extend(glob.glob(filename))

    X1D.extractSpec(all_input,
                    outdir=outdir,
                    update_input=update_input,
                    location=location,
                    extrsize=extrsize,
                    find_target=find_targ,
                    verbosity=verbosity)
Example #12
def mosred(infile_list, slitmask,propcode=None, dy=0, inter=True, guesstype='rss', guessfile='', rstep=100, automethod='Matchlines', preprocess=False):

    #set up the files
    infiles=','.join(['%s' % x for x in infile_list])
    obsdate=os.path.basename(infile_list[0])[7:15]

    #set up some files that will be needed
    logfile='spec'+obsdate+'.log'
    dbfile='spec%s.db' % obsdate

    #create the observation log
    obs_dict=obslog(infile_list)

    #check the value of dy


    #apply the mask to the data sets
    for i in range(len(infile_list)):
        specslit(image=infile_list[i], outimage='', outpref='s', exttype='rsmt', slitfile=slitmask,
                 outputslitfile='', regprefix='ds_', sections=3, width=25.0, sigma=2.2, thres=6.0, order=1, padding=5, yoffset=dy, 
                 inter=False, clobber=True, logfile=logfile, verbose=True)

    for i in range(len(infile_list)):
           if obs_dict['OBJECT'][i].upper().strip()=='ARC' and (obs_dict['PROPID'][i].upper().strip()==propcode or propcode is None):
               lamp=obs_dict['LAMPID'][i].strip().replace(' ', '')
               arcimage='s'+os.path.basename(infile_list[i])
               if lamp == 'NONE': lamp='CuAr'
               lampfile=iraf.osfn("pysalt$data/linelists/%s.salt" % lamp)

               specselfid(arcimage, '', 'a', arcimage, 'middlerow', 3, clobber=True, logfile=logfile, verbose=True)

               specidentify('a'+arcimage, lampfile, dbfile, guesstype=guesstype,
                  guessfile=guessfile, automethod=automethod,  function='legendre',  order=3,
                  rstep=rstep, rstart='middlerow', mdiff=20, thresh=3, niter=5, smooth=3,
                  inter=True, clobber=True, preprocess=True, logfile=logfile, verbose=True)

               #specrectify(arcimage, outimages='', outpref='x', solfile=dbfile, caltype='line',
               #    function='legendre',  order=3, inttype='interp', w1=None, w2=None, dw=None, nw=None,
               #    blank=0.0, clobber=True, logfile=logfile, verbose=True)

    objimages=''
    spec_list=[]
    for i in range(len(infile_list)):
       if obs_dict['CCDTYPE'][i].count('OBJECT') and obs_dict['INSTRUME'][i].count('RSS')  and \
          (obs_dict['PROPID'][i].upper().strip()==propcode or propcode is None) and \
          obs_dict['OBSMODE'][i].count('SPECTROSCOPY'):
          img = infile_list[i]
          ##rectify it
          specselfid('s'+img, '', 'a', arcimage, 'middlerow', 3, clobber=True, logfile=logfile, verbose=True)
          specrectify('as'+img, outimages='', outpref='x', solfile=dbfile, caltype='line',
            function='legendre',  order=3, inttype='interp', w1=None, w2=None, dw=None, nw=None,
            blank=0.0, clobber=True, logfile=logfile, verbose=True)
Example #13
def init_northsouth(fs, topdir, rawpath):
    # Get the correct directory for the standard star
    base_stddir = 'spec50cal/'
    extfile = iraf.osfn('gmisc$lib/onedstds/kpnoextinct.dat') 
    observatory = 'Gemini-North'

    global is_GS
    is_GS = fits.getval(fs[0], 'OBSERVAT') == 'Gemini-South'
    if 'Hamamatsu' in fits.getval(fs[0], 'DETECTOR'):
        global dooverscan
        dooverscan = True
        global do_qecorr
        do_qecorr = True
    if is_GS:
        if not os.path.exists(topdir + '/raw_fixhdr'):
            iraf.mkdir(topdir + '/raw_fixhdr')
        #rawpath = '%s/raw_fixhdr/' % topdir
        #os.system('gmoss_fix_headers.py --files="%s/raw/*.fits" --destination=%s' % (topdir, rawpath))
        base_stddir = 'ctionewcal/'
        observatory = 'Gemini-South'
        extfile = iraf.osfn('gmisc$lib/onedstds/ctioextinct.dat') 
    return extfile, observatory, base_stddir, rawpath
Example #14
def mosred(infile_list, slitmask,propcode=None, dy=0, inter=True, guesstype='rss', guessfile='', rstep=100, automethod='Matchlines'):

    #set up the files
    infiles=','.join(['%s' % x for x in infile_list])
    obsdate=os.path.basename(infile_list[0])[7:15]

    #set up some files that will be needed
    logfile='spec'+obsdate+'.log'
    dbfile='spec%s.db' % obsdate

    #create the observation log
    obs_dict=obslog(infile_list)


    #apply the mask to the data sets
    for i in range(len(infile_list)):
        specslit(image=infile_list[i], outimage='', outpref='s', exttype='rsmt', slitfile='../../P001423N01.xml', 
                 outputslitfile='', regprefix='ds_', sections=3, width=25.0, sigma=2.2, thres=6.0, order=1, padding=5, yoffset=dy, 
                 inter=False, clobber=True, logfile=logfile, verbose=True)

    for i in range(len(infile_list)):
           if obs_dict['OBJECT'][i].upper().strip()=='ARC' and obs_dict['PROPID'][i].upper().strip()==propcode:
               lamp=obs_dict['LAMPID'][i].strip().replace(' ', '')
               arcimage='s'+os.path.basename(infile_list[i])
               if lamp == 'NONE': lamp='CuAr'
               lampfile=iraf.osfn("../../%s.salt" % lamp)

               specselfid(arcimage, '', 'a', arcimage, 'middlerow', 3, clobber=True, logfile=logfile, verbose=True)

               specidentify('a'+arcimage, lampfile, dbfile, guesstype=guesstype,
                  guessfile=guessfile, automethod=automethod,  function='legendre',  order=3,
                  rstep=rstep, rstart='middlerow', mdiff=20, thresh=3, niter=5, smooth=3,
                  inter=False, clobber=True, logfile=logfile, verbose=True)

               #specrectify(arcimage, outimages='', outpref='x', solfile=dbfile, caltype='line',
               #    function='legendre',  order=3, inttype='interp', w1=None, w2=None, dw=None, nw=None,
               #    blank=0.0, clobber=True, logfile=logfile, verbose=True)

    objimages=''
    spec_list=[]
    for i in range(len(infile_list)):
       if obs_dict['CCDTYPE'][i].count('OBJECT') and obs_dict['INSTRUME'][i].count('RSS')  and \
          obs_dict['PROPID'][i].upper().strip()==propcode and \
          obs_dict['OBSMODE'][i].count('SPECTROSCOPY'):
          img = infile_list[i]
          ##rectify it
          specselfid('s'+img, '', 'a', arcimage, 'middlerow', 3, clobber=True, logfile=logfile, verbose=True)
          specrectify('as'+img, outimages='', outpref='x', solfile=dbfile, caltype='line',
            function='legendre',  order=3, inttype='interp', w1=None, w2=None, dw=None, nw=None,
            blank=0.0, clobber=True, logfile=logfile, verbose=True)
Example #15
def buildtmc(tmcname):
    from pyraf import iraf
    from iraf import stsdas,hst_calib,synphot
    out=open('buildtmc.log','w')
    f=pyfits.open(tmcname)
    flist=f[1].data.field('filename')
    iraf.set(crrefer='./') #work locally

    for k in range(len(flist)):
        oldname=iraf.osfn(flist[k]).split('[')[0]
        newname=fincre(oldname)
        if os.path.exists(newname):
            flist[k]=fincre(flist[k])
        else:
            out.write("%s: no change necessary\n"%oldname)
    f.writeto(tmcname.replace(".fits","_new.fits"))
    out.close()
Example #16
def specred(infile_list, target, propcode, calfile=None, inter=True, automethod='Matchlines'):

    #set up the files
    infiles=','.join(['%s' % x for x in infile_list])
    obsdate=os.path.basename(infile_list[0])[7:15]

    #set up some files that will be needed
    logfile='spec'+obsdate+'.log'
    dbfile='spec%s.db' % obsdate

    #create the observation log
    obs_dict=obslog(infile_list)

    for i in range(len(infile_list)):
           if obs_dict['OBJECT'][i].upper().strip()=='ARC' and obs_dict['PROPID'][i].upper().strip()==propcode:
               lamp=obs_dict['LAMPID'][i].strip().replace(' ', '')
               arcimage=os.path.basename(infile_list[i])
               if lamp == 'NONE': lamp='CuAr'
               lampfile=iraf.osfn("pysalt$data/linelists/%s.salt" % lamp)

               specidentify(arcimage, lampfile, dbfile, guesstype='rss',
                  guessfile='', automethod=automethod,  function='legendre',  order=3,
                  rstep=100, rstart='middlerow', mdiff=20, thresh=3, niter=5, smooth=3,
                  inter=False, clobber=True, logfile=logfile, verbose=True)

               specrectify(arcimage, outimages='', outpref='x', solfile=dbfile, caltype='line',
                   function='legendre',  order=3, inttype='interp', w1=None, w2=None, dw=None, nw=None,
                   blank=0.0, clobber=True, logfile=logfile, verbose=True)

    objimages=''
    spec_list=[]
    for i in range(len(infile_list)):
       if obs_dict['CCDTYPE'][i].count('OBJECT') and obs_dict['INSTRUME'][i].count('RSS')  and obs_dict['PROPID'][i].upper().strip()==propcode:
          img = infile_list[i]
          ##rectify it
          specrectify(img, outimages='', outpref='x', solfile=dbfile, caltype='line',
            function='legendre',  order=3, inttype='interp', w1=None, w2=None, dw=None, nw=None,
            blank=0.0, clobber=True, logfile=logfile, verbose=True)

          #extract the spectra
          spec_list.append(extract_spectra('x'+img, yc=1030, calfile=calfile, findobject=True, smooth=False, maskzeros=True, clobber=True))
          
    #combine the results
    w,f,e = speccombine(spec_list, obsdate)
    outfile = "%s_%s.spec" % (target, obsdate)
    write_spectra(outfile, w,f,e)
Example #17
def buildtmc(tmcname):
    from pyraf import iraf
    from iraf import stsdas, hst_calib, synphot
    out = open('buildtmc.log', 'w')
    f = pyfits.open(tmcname)
    flist = f[1].data.field('filename')
    iraf.set(crrefer='./')  #work locally

    for k in range(len(flist)):
        oldname = iraf.osfn(flist[k]).split('[')[0]
        newname = fincre(oldname)
        if os.path.exists(newname):
            flist[k] = fincre(flist[k])
        else:
            out.write("%s: no change necessary\n" % oldname)
    f.writeto(tmcname.replace(".fits", "_new.fits"))
    out.close()
Example #18
    def setglobal(self, fname=None):
        if fname is None:
            fname = __file__
        self.propername = self.id()
        base, ext = os.path.splitext(os.path.basename(fname))
        main, case, test = self.propername.split('.')
        self.name = os.path.join(base, test, case)
        self.wavename = self.name + '_wave.fits'
        #Make sure the directories exist
        dirname = os.path.dirname(self.name)
        if not os.path.isdir(dirname):
            os.makedirs(dirname)

        self.file = self.name
        self.thresh = 0.01
        self.superthresh = 0.20
        self.sigthresh = 0.01
        self.discrep = -99
        self.tda = {
            'Obsmode': self.obsmode,
            'Spectrum': self.spectrum,
            'ETCid': None,  #placeholder
            'Thresh': self.thresh,
            'Superthresh': self.superthresh,
            'SigThresh': self.sigthresh,
            'crrefer': iraf.osfn('crrefer$'),
            'pysyn_cdbs': os.environ['PYSYN_CDBS'],
            'SkyLines': self.hasSkyLines()
        }
        try:
            self.tda['Subset'] = self.subset
        except AttributeError:
            pass
        try:
            self.tda['ETCid'] = self.etcid
        except AttributeError:
            pass
        try:
            self.tda['form'] = self.form
        except AttributeError:
            pass

        self.tra = {}
Example #19
def makesensfunc(scifiles, objname, base_stddir, extfile):
    #TODO use individual standard star observations in each setting, not just red and blue
    for f in scifiles:
        redorblue = getredorblue(f)
        # If this is a standard star, run standard
        # Standards will have an observation class of either progCal or partnerCal
        obsclass = fits.getval(f[:-4] + '.fits', 'OBSCLASS')
        if obsclass == 'progCal' or obsclass == 'partnerCal':
            # Figure out which directory the standard star is in
            stddir = iraf.osfn('gmisc$lib/onedstds/') + base_stddir
            
            # iraf.gsstandard('est' + f[:-4], 'std' + redorblue,
            #                'sens' + redorblue, starname=objname.lower(),
            #                caldir='gmisc$lib/onedstds/'+stddir, fl_inter=True)

            specsens('et' + f[:-4] + '.fits', 'sens' + redorblue + '.fits',
                     stddir + objname + '.dat' , extfile,
                     float(fits.getval(f[:-4] + '.fits', 'AIRMASS')),
                     float(fits.getval(f[:-4] + '.fits', 'EXPTIME')))
Example #20
def makesensfunc(scifiles, objname, base_stddir, extfile):
    #TODO use individual standard star observations in each setting, not just red and blue
    for f in scifiles:
        redorblue = getredorblue(f)
        # If this is a standard star, run standard
        # Standards will have an observation class of either progCal or partnerCal
        obsclass = pyfits.getval(f[:-4] + '.fits', 'OBSCLASS')
        if obsclass == 'progCal' or obsclass == 'partnerCal':
            # Figure out which directory the standard star is in
            stddir = iraf.osfn('gmisc$lib/onedstds/') + base_stddir
            
            # iraf.gsstandard('est' + f[:-4], 'std' + redorblue,
            #                'sens' + redorblue, starname=objname.lower(),
            #                caldir='gmisc$lib/onedstds/'+stddir, fl_inter=True)

            specsens('et' + f[:-4] + '.fits', 'sens' + redorblue + '.fits',
                     stddir + objname + '.dat' , extfile,
                     float(pyfits.getval(f[:-4] + '.fits', 'AIRMASS')),
                     float(pyfits.getval(f[:-4] + '.fits', 'EXPTIME')))
Example #21
    def setglobal(self,fname=None):
        if fname is None:
            fname=__file__
        self.propername=self.id()
        base,ext=os.path.splitext(os.path.basename(fname))
        main,case,test=self.propername.split('.')
        self.name=os.path.join(base,test,case)
        self.wavename=self.name+'_wave.fits'
        #Make sure the directories exist
        dirname=os.path.dirname(self.name)
        if not os.path.isdir(dirname):
            os.makedirs(dirname)

        self.file=self.name
        self.thresh=0.01
        self.superthresh=0.20
        self.sigthresh=0.01
        self.discrep=-99
        self.tda={'Obsmode':self.obsmode,
                 'Spectrum':self.spectrum,
                  'ETCid':None, #placeholder
                 'Thresh':self.thresh,
                  'Superthresh':self.superthresh,
                  'SigThresh':self.sigthresh,
                  'crrefer':iraf.osfn('crrefer$'),
                  'pysyn_cdbs':os.environ['PYSYN_CDBS'],
                  'SkyLines':self.hasSkyLines()}
        try:
            self.tda['Subset']=self.subset
        except AttributeError:
            pass
        try:
            self.tda['ETCid']=self.etcid
        except AttributeError:
            pass
        try:
            self.tda['form']=self.form
        except AttributeError:
            pass

        self.tra={}
Example #22
def specred(infile_list, propcode=None, inter=True, automethod='Matchlines'):

    #set up the files
    infiles=','.join(['%s' % x for x in infile_list])
    obsdate=os.path.basename(infile_list[0])[7:15]

    #set up some files that will be needed
    logfile='spec'+obsdate+'.log'
    dbfile='spec%s.db' % obsdate

    #create the observation log
    obs_dict=obslog(infile_list)

    for i in range(len(infile_list)):
           if obs_dict['OBJECT'][i].upper().strip()=='ARC' and obs_dict['PROPID'][i].upper().strip()==propcode:
               lamp=obs_dict['LAMPID'][i].strip().replace(' ', '')
               arcimage=os.path.basename(infile_list[i])
               if lamp == 'NONE': lamp='CuAr'
               lampfile=iraf.osfn("pysalt$data/linelists/%s.salt" % lamp)
               #lampfile='/Users/crawford/research/kepler/Xe.salt' 

               specidentify(arcimage, lampfile, dbfile, guesstype='rss',
                  guessfile='', automethod=automethod,  function='legendre',  order=3,
                  rstep=100, rstart='middlerow', mdiff=20, thresh=3, niter=5, smooth=3,
                  inter=inter, clobber=True, logfile=logfile, verbose=True)

               #specrectify(arcimage, outimages='', outpref='x', solfile=dbfile, caltype='line',
               #    function='legendre',  order=3, inttype='interp', w1=None, w2=None, dw=None, nw=None,
               #    blank=0.0, clobber=True, logfile=logfile, verbose=True)

    objimages=''
    spec_list=[]
    for i in range(len(infile_list)):
       if obs_dict['CCDTYPE'][i].count('OBJECT') and obs_dict['INSTRUME'][i].count('RSS')  and obs_dict['PROPID'][i].upper().strip()==propcode:
          img = infile_list[i]
          ##rectify it
          specrectify(img, outimages='', outpref='x', solfile=dbfile, caltype='line',
            function='legendre',  order=3, inttype='interp', w1=None, w2=None, dw=None, nw=None,
            blank=0.0, nearest=True, clobber=True, logfile=logfile, verbose=True)
Example #23
    # extract the order of the fit and the positions of each of the slits
    order, slit_positions = mt.read_slits_HDUtable(struct[slitext])

    return order, slit_positions


def check_ypos(slit_positions, data):
    """Check the y-position"""
    from scipy.signal import find_peaks_cwt

    # determine the slit edges from the peaks in the gradient of the row sums
    y = data.sum(axis=1)
    yp = find_peaks_cwt(np.gradient(y), np.array([5]))
    x = np.arange(len(y))
    s_min= x.max()
    s_max= x.min()
    for n, ym, yx  in slit_positions: 
        if ym < s_min: s_min = ym
        if yx > s_max: s_max = yx
    dy = yp[yp>10].min()-s_min
    for i in range(len(slit_positions)):
        n, ym, yx = slit_positions[i]
        slit_positions[i] = [n, ym+dy, yx+dy]
    
    return slit_positions, dy

# main code
parfile = iraf.osfn("saltspec$specslit.par")
t = iraf.IrafTaskFactory(taskname="specslit", value=parfile, function=specslit,
                         pkgname='saltspec')
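
For context, a hedged sketch of how a task registered this way is typically invoked from a PyRAF session once the hosting package has been loaded; the parameter values are illustrative and echo a subset of the specslit parameters shown in Example #12:

from pyraf import iraf

# the .par file resolved by iraf.osfn() above supplies the task's default parameters
iraf.specslit(image='mosaic.fits', outimage='', outpref='s',
              slitfile='mask.xml', clobber=True, logfile='salt.log', verbose=True)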
Example #24
        ])
    parser.add_argument('--verbose',
                        action='store_true',
                        help='Write to a log file?')
    parser.add_argument('--logfile',
                        '-l',
                        default='kepdeltapix.log',
                        help='Name of ascii log file',
                        dest='logfile',
                        type=str)
    parser.add_argument('--status',
                        '-e',
                        help='Exit status (0=good)',
                        default=0,
                        dest='status',
                        type=int)

    args = parser.parse_args()
    cmdLine = True
    kepdeltapix(args.infile, args.rownum, args.columns, args.rows, args.fluxes,
                args.prfdir, args.interpolation, args.tolerance, args.fittype,
                args.imscale, args.cmap, args.verbose, args.logfile,
                args.status, cmdLine)

else:
    from pyraf import iraf
    parfile = iraf.osfn("kepler$kepdeltapix.par")
    t = iraf.IrafTaskFactory(taskname="kepdeltapix",
                             value=parfile,
                             function=kepdeltapix)
Example #25
    #set binning
    binstr = saltstring.makebinstr(flatkeys[2])

    #set gain
    gnstr = saltstring.makegainstr(flatkeys[3])

    #set readout
    rostr = saltstring.makereadoutstr(flatkeys[4])

    fltstr = flatkeys[5].strip()

    if flatkeys[6].count('SKY'):
        skystr = 'Sky'
    else:
        skystr = ''

    flatname = '%s%s%sFlat%s%s%s%s%s.fits' % (instr, obsdate, skystr, mdstr,
                                              binstr, gnstr, rostr, fltstr)
    return flatname


# -----------------------------------------------------------
# main code

if not iraf.deftask('saltclean'):
    parfile = iraf.osfn("saltred$saltclean.par")
    t = iraf.IrafTaskFactory(taskname="saltclean",
                             value=parfile,
                             function=saltclean,
                             pkgname='saltred')
Example #26
                    # update the header values with the image name and extension number
                    try:
                       hdue = pyfits.ImageHDU(oarray)
                       hdue.header=header
                       hdue.header.update('ONAME',infile,'Original image name')
                       hdue.header.update('OEXT',hdu,'Original extension number')
                    except Exception as e:
                       msg='SALTPHOT--WARNING:  Could not update image in newfits file for %s ext %i because %s' \
                           % (infile, hdu, e)
                       raise SaltIOError(msg)

                    hduList.append(hdue)
                    j +=1
    
           # close FITS file
           saltio.closefits(struct)
    
        # close the output fits file:
        try:
           # write out the file
           hduList.flush()
           hduList.close()
        except Exception as e:
           raise SaltIOError('Failed to write %s because %s' % (outfits, e))
    
# -----------------------------------------------------------
# main code

parfile = iraf.osfn("slottools$slotback.par")
t = iraf.IrafTaskFactory(taskname="slotback",value=parfile,function=slotback,pkgname='slottools')
Example #27
    return t_real, t_wrong, t_start, t_arr, ysum_arr

def find_real_time(dt, t_min, t_max):
    """Find the real exposure+dead time

    returns the arrays of candidate times and their merit-function values
    """
    t_e=np.arange(t_min,t_max,0.0001)
    ysum=t_e*0.0
    for i in range(len(t_e)):
        ysum[i]=ntime_func(dt, t_e[i])
    return t_e, ysum

def ntime_func(dt, t_e):
    """Merit function to determine best time
    Weighted for the number of objects in each step

    return float
    """
    y=0
    for j in range(len(dt)):
        i=dt[j]/t_e
        y += abs(i-round(i))
    return y
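
# Hypothetical usage sketch (not part of the original task): given an array of
# measured frame-time differences dt, scan the candidate exposure+dead times with
# find_real_time() and keep the value that minimizes the merit function.
# Assumes numpy has been imported as np, as the functions above require.
dt_obs = np.array([0.301, 0.602, 0.903, 1.204])
t_e, ysum = find_real_time(dt_obs, t_min=0.25, t_max=0.35)
best_t = t_e[ysum.argmin()]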

# -----------------------------------------------------------
# main code

parfile = iraf.osfn("slottools$slotutcfix.par")
t = iraf.IrafTaskFactory(taskname="slotutcfix",value=parfile,function=slotutcfix,pkgname='slottools')
Example #28
# -----------------------------------------------------------
# main
if '--shell' in sys.argv:
    import argparse
    
    parser = argparse.ArgumentParser(description='Plot, create or edit custom light curve extraction masks for target pixel files')
    parser.add_argument('--shell', action='store_true', help='Are we running from the shell?')
    parser.add_argument('infile', help='name of input target pixel FITS file', type=str)
    parser.add_argument('maskfile', help='name of ASCII custom aperture definition file', type=str)
    parser.add_argument('--plotfile', default='', help='name of output PNG plot file', type=str)
    parser.add_argument('--tabrow', default=2177, help='The table row containing the image to plot', type=int)
    parser.add_argument('--imin', default=1.5e5, help='minimum of image intensity scale [e-]', type=float)
    parser.add_argument('--imax', default=5.0e5, help='maximum of image intensity scale [e-]', type=float)
    parser.add_argument('--iscale', default='logarithmic', help='type of image intensity scale', 
        type=str, choices=['linear','logarithmic','squareroot'])
    parser.add_argument('--cmap', default='PuBu', help='image colormap', type=str)
    parser.add_argument('--verbose', action='store_true', help='Write to a log file?')
    parser.add_argument('--logfile', '-l', help='Name of ascii log file', default='kepcotrend.log', dest='logfile', type=str)
    parser.add_argument('--status', '-e', help='Exit status (0=good)', default=0, dest='status', type=int)

    args = parser.parse_args()
    cmdLine=True

    kepmask(args.infile, args.maskfile, args.plotfile, args.tabrow, args.imin, args.imax, args.iscale,
        args.cmap, args.verbose, args.logfile, args.status, cmdLine)
    
else:
    from pyraf import iraf
    parfile = iraf.osfn("kepler$kepmask.par")
    t = iraf.IrafTaskFactory(taskname="kepmask", value=parfile, function=kepmask)
Example #29
def findwskeyword(keyword, sol):
    """Find and return a value for a keyword in the list of the wavelength solution"""
    i = sol.index(keyword)
    j = sol[i:].index('\n')
    return sol[i:i + j].split('=')[1].strip()


def enterdatetime(dstr):
    """Break up the datetime string to create a datetime object
       return datetime
    """
    dlist = dstr.split()
    year, month, day = dlist[0].split('-')
    hour, minute, second = dlist[1].split(':')

    return datetime.datetime(
        int(year), int(month), int(day), int(hour), int(minute), int(float(second)))


def subtracttime(d1, d2):
    """Return the difference in two dates in seconds"""
    dt = max(d1, d2) - min(d1, d2)
    return 86400 * dt.days + dt.seconds
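
# Hypothetical usage sketch of the three helpers above (the keyword name and the
# dates are illustrative): findwskeyword() pulls a value out of the raw text of a
# wavelength-solution entry, while enterdatetime()/subtracttime() difference two
# DATE-OBS style strings in seconds.
sol_text = 'RSTART=1022\nRSTEP=100\n'
print(findwskeyword('RSTART', sol_text))     # -> '1022'

d1 = enterdatetime('2012-03-01 01:02:03.5')
d2 = enterdatetime('2012-03-01 01:03:03.5')
print(subtracttime(d1, d2))                  # -> 60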


# main code
if not iraf.deftask('specrectify'):
    parfile = iraf.osfn("saltspec$specrectify.par")
    t = iraf.IrafTaskFactory(taskname="specrectify", value=parfile, 
                             function=specrectify, pkgname='saltspec')
Example #30
    bpm:  bad pixel mask array

   """
   if bpm is None:
       bpm=arr*0.0+1.0
       wei=None
   else:
       # correct the weights for the bad pixel mask
       if ivar is None: ivar=arr*0.0+1.0
       wei=ivar*(1-bpm)
       #TODO: need to add a check to make sure that at least one of the
       #weights is non-zero at each position
       check_wei=wei.sum(axis=0)
       wei[0][check_wei==0]=wei.min()
  
   if method=='average':
       c_arr, s_arr=np.average(arr, axis=0, weights=wei, returned=True)
       return c_arr, s_arr
   elif method=='median':
       return np.median(arr, axis=0), bpm.sum(axis=0)
   else: 
       msg='%s is not a method for combining arrays' % method
       raise SaltError(msg)

# -----------------------------------------------------------
# main code 
if not iraf.deftask('saltcombine'):
   parfile = iraf.osfn("saltred$saltcombine.par") 
   t = iraf.IrafTaskFactory(taskname="saltcombine",value=parfile,function=saltcombine, pkgname='saltred')
Example #31
    parser.add_argument('--verbose', action='store_true', help='Write to a log file?')
    parser.add_argument('--logfile', '-l', help='Name of ascii log file', default='keptransit.log', dest='logfile', type=str)
    parser.add_argument('--status', '-e', help='Exit status (0=good)', default=0, dest='status', type=int)

    cmdLine=True

    args = parser.parse_args()

    keptransit(args.inputfile, args.outputfile, args.datacol, args.errorcol,
        args.periodini_d, args.rprsini, args.T0ini,
        args.Eccini, args.arsini, args.incini, args.omegaini, args.LDparams, args.secini,
        args.fixperiod, args.fixrprs, args.fixT0,
        args.fixEcc, args.fixars, args.fixinc, args.fixomega, args.fixsec, args.fixfluxoffset,
        args.removeflaggeddata, args.ftol,
        args.fitter, args.norm,
        args.clobber, args.plot, args.verbose, args.logfile, args.status,
        cmdLine)

else:
    from pyraf import iraf
    
    parfile = iraf.osfn("kepler$keptransit.par")
    t = iraf.IrafTaskFactory(taskname="keptransit", value=parfile, function=keptransit)
    
Example #32
    parser.add_argument('--verbose',
                        action='store_true',
                        help='Write to a log file?')
    parser.add_argument('--logfile',
                        '-l',
                        help='Name of ascii log file',
                        default='kepcotrend.log',
                        dest='logfile',
                        type=str)
    parser.add_argument('--status',
                        '-e',
                        help='Exit status (0=good)',
                        default=0,
                        dest='status',
                        type=int)

    args = parser.parse_args()

    cmdLine = True

    kepdiffim(args.infile, args.outfile, args.plotfile, args.imscale,
              args.cmap, args.filter, args.function, args.cutoff, args.clobber,
              args.verbose, args.logfile, args.status, cmdLine)

else:
    from pyraf import iraf
    parfile = iraf.osfn("kepler$kepdiffim.par")
    t = iraf.IrafTaskFactory(taskname="kepdiffim",
                             value=parfile,
                             function=kepdiffim)
Example #33
    # create the columns and the table HDU for each extracted aperture
    for ap in ap_list:
        fvar = abs(ap.lvar)**0.5
        # create the columns
        col1 = pyfits.Column(name='wavelength',
                             format='D',
                             unit='Angstroms',
                             array=ap.wave)
        col2 = pyfits.Column(name='counts',
                             format='D',
                             unit='Counts',
                             array=ap.ldata)
        col3 = pyfits.Column(name='counts_err', format='D', array=fvar)

        # add to the table
        tbhdu = pyfits.new_table([col1, col2, col3])
        hdulist.append(tbhdu)

    # write it out
    hdulist.writeto(ofile)
    return


# main code

parfile = iraf.osfn("saltspec$specextract.par")
t = iraf.IrafTaskFactory(taskname="specextract",
                         value=parfile,
                         function=specextract,
                         pkgname='saltspec')
Example #34
    # convert from string to list
    location = commaSep(location, "float")
    extrsize = commaSep(extrsize, "int")

    inlist = input.split(",")
    more_input = []
    for filename in inlist:
        more_input.extend(filename.split())
    all_input = []
    for filename in more_input:
        filename = iraf.osfn(filename)
        all_input.extend(glob.glob(filename))

    X1D.extractSpec(all_input,
                    outdir=outdir,
                    update_input=update_input,
                    location=location,
                    extrsize=extrsize,
                    find_target=find_targ,
                    verbosity=verbosity)


# Initialize IRAF Task definition now...
parfile = iraf.osfn(_parfile)
junk = iraf.IrafTaskFactory(taskname=_taskname,
                            value=parfile,
                            pkgname=PkgName,
                            pkgbinary=PkgBinary,
                            function=x1d_iraf)
iraf.x1dcorr.version = _version
Example #35
        parser.add_argument('--verbose',
                            action='store_true',
                            help='Write to a log file?')
        parser.add_argument('--logfile',
                            '-l',
                            default='kepprfphot.log',
                            help='Name of ascii log file',
                            dest='logfile',
                            type=str)
        parser.add_argument('--status',
                            '-e',
                            help='Exit status (0=good)',
                            default=0,
                            dest='status',
                            type=int)

        args = parser.parse_args()
        cmdLine = True
        kepprf(args.infile, args.plotfile, args.rownum, args.columns,
               args.rows, args.fluxes, args.border, args.background,
               args.focus, args.prfdir, args.xtol, args.ftol, args.imscale,
               args.cmap, args.labcol, args.apercol, args.plot, args.verbose,
               args.logfile, args.status, cmdLine)

    else:
        from pyraf import iraf
        parfile = iraf.osfn("kepler$kepprf.par")
        t = iraf.IrafTaskFactory(taskname="kepprf",
                                 value=parfile,
                                 function=kepprf)
Example #36
    parser.add_argument('--verbose',
                        action='store_true',
                        help='Write to a log file?')
    parser.add_argument('--logfile',
                        '-l',
                        help='Name of ascii log file',
                        default='keptimefix.log',
                        dest='logfile',
                        type=str)
    parser.add_argument('--status',
                        '-e',
                        help='Exit status (0=good)',
                        default=0,
                        dest='status',
                        type=int)

    cmdLine = True

    args = parser.parse_args()
    cmdLine = True

    keptimefix(args.infile, args.outfile, args.clobber, args.verbose,
               args.logfile, args.status, cmdLine)

else:
    from pyraf import iraf
    parfile = iraf.osfn("kepler$keptimefix.par")
    t = iraf.IrafTaskFactory(taskname="keptimefix",
                             value=parfile,
                             function=keptimefix)
Example #37
    parser.add_argument('plotfile', help='Name of output PNG plot file', type=str)
    parser.add_argument('--rownum', '-r', default=2200, help='Row number of image stored in infile', dest='rownum', type=int)
    parser.add_argument('--columns', help='Column number of each source to be fit', type=str)
    parser.add_argument('--rows', help='Row number of each source to be fit', type=str)
    parser.add_argument('--fluxes', help='Relative flux of each source to be fit', type=str)
    parser.add_argument('--border', '-b', help='Order of background polynomial fit', default=1, dest='border', type=int)
    parser.add_argument('--background', action='store_true', help='Fit background?', default=False)
    parser.add_argument('--focus', action='store_true', help='Fit focus changes?', default=False)
    parser.add_argument('--prfdir', help='Folder containing Point Response Function FITS files', type=str)
    parser.add_argument('--xtol', '-x', default=1.0e-4, help='Fit parameter tolerance', dest='xtol', type=float)
    parser.add_argument('--ftol', '-f', default=1.0, help='Fit minimization tolerance', dest='ftol', type=float)
    parser.add_argument('--imscale', '-i', help='Type of image intensity scale', default='linear', dest='imscale', type=str,choices=['linear','logarithmic','squareroot'])
    parser.add_argument('--colmap', '-c', help='Image colormap', default='YlOrBr', dest='cmap', type=str,choices=['Accent','Blues','BrBG','BuGn','BuPu','Dark2','GnBu','Greens','Greys','OrRd','Oranges','PRGn','Paired','Pastel1','Pastel2','PiYG','PuBu','PuBuGn','PuOr','PuRd','Purples','RdBu','RdGy','RdPu','RdYlBu','RdYlGn','Reds','Set1','Set2','Set3','Spectral','YlGn','YlGnBu','YlOrBr','YlOrRd','afmhot','autumn','binary','bone','brg','bwr','cool','copper','flag','gist_earth','gist_gray','gist_heat','gist_ncar','gist_rainbow','gist_yarg','gnuplot','gnuplot2','gray','hot','hsv','jet','ocean','pink','prism','rainbow','seismic','spectral','spring','summer','terrain','winter','browse'])
    parser.add_argument('--labcol', help='Label color', default='#ffffff', type=str)
    parser.add_argument('--apercol', help='Aperture color', default='#ffffff', type=str)
    parser.add_argument('--plot', action='store_true', help='Plot fit results?', default=False)
    parser.add_argument('--verbose', action='store_true', help='Write to a log file?')
    parser.add_argument('--logfile', '-l', default='kepprfphot.log', help='Name of ascii log file', dest='logfile', type=str)
    parser.add_argument('--status', '-e', help='Exit status (0=good)', default=0, dest='status', type=int)

    args = parser.parse_args()
    cmdLine=True
    kepprf(args.infile,args.plotfile,args.rownum,args.columns,args.rows,args.fluxes,args.border,
           args.background,args.focus,args.prfdir,args.xtol,args.ftol,args.imscale,args.cmap,
           args.labcol,args.apercol,args.plot,args.verbose,args.logfile,args.status,cmdLine)
    
else:
    from pyraf import iraf
    parfile = iraf.osfn("kepler$kepprf.par")
    t = iraf.IrafTaskFactory(taskname="kepprf", value=parfile, function=kepprf)
Example #38
        for i in range(ys):
            x = numpy.arange(xs)
            rdata = data[i, :]
            rmask = mask[i, :]
            rmask = nd.minimum_filter(rmask, size=3)
            if rmask.any() == True:
                rdata = numpy.interp(x, x[rmask], rdata[rmask])
                data[i, rmask == 0] = rdata[rmask == 0]

    return data


def tran_func(a, xshift, yshift, xmag, ymag, xrot, yrot):
    xtran = ymag * a[0] * cos(yrot * pi / 180.0) \
        - xmag * a[1] * sin(xrot * pi / 180) \
        - yshift
    ytran = ymag * a[0] * sin(yrot * pi / 180.0) \
        + xmag * a[1] * cos(xrot * pi / 180) \
        - xshift
    return xtran, ytran
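
# Hypothetical sanity check of tran_func (not part of the original module): with
# unit magnification, zero rotation and zero shifts the transform should return
# the input coordinate pair unchanged.  Assumes cos, sin and pi are imported at
# module level, as the function above requires.
print(tran_func([10.0, 20.0], 0.0, 0.0, 1.0, 1.0, 0.0, 0.0))   # -> (10.0, 20.0)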


# -----------------------------------------------------------
# main code
if not iraf.deftask('saltmosaic'):
    parfile = iraf.osfn("saltred$saltmosaic.par")
    t = iraf.IrafTaskFactory(taskname="saltmosaic",
                             value=parfile,
                             function=saltmosaic,
                             pkgname='saltred')
Example #39
    parser.add_argument('--verbose',
                        action='store_true',
                        help='Write to a log file?')
    parser.add_argument('--logfile',
                        '-l',
                        help='Name of ascii log file',
                        default='kepcotrend.log',
                        dest='logfile',
                        type=str)
    parser.add_argument('--status',
                        '-e',
                        help='Exit status (0=good)',
                        default=0,
                        dest='status',
                        type=int)

    args = parser.parse_args()

    cmdLine = True

    kepwindow(args.infile, args.outfile, args.fcol, args.fmax, args.nfreq,
              args.plot, args.clobber, args.verbose, args.logfile, args.status,
              cmdLine)

else:
    from pyraf import iraf
    parfile = iraf.osfn("kepler$kepwindow.par")
    t = iraf.IrafTaskFactory(taskname="kepwindow",
                             value=parfile,
                             function=kepwindow)
Example #40
    header.update('SCIEXT', sci_ext, comment='Extension of science frame')
    return pyfits.ImageHDU(data=data, header=header, name='BPM')


def createvariance(inhdu, sci_ext, var_ext):
    """Create a variance hdu from an input hdu"""

    # create the variance array
    data = inhdu.data.copy()
    if (data <= 0).any():
        j = numpy.where(data > 0)
        min_pos = data[j].min()
        j = numpy.where(data <= 0)
        data[j] = min_pos
    data = data ** 0.5

    header = inhdu.header.copy()
    header['EXTVER'] = var_ext
    header.update('SCIEXT', sci_ext, comment='Extension of science frame')
    return pyfits.ImageHDU(data=data, header=header, name='VAR')

# -----------------------------------------------------------
# main code

parfile = iraf.osfn("saltspec$specprepare.par")
t = iraf.IrafTaskFactory(
    taskname="specprepare",
    value=parfile,
    function=specprepare,
    pkgname='saltspec')
Example #41
             "of the number of frames per combined exposure (", frames_per_combined,
             ") so", total_frames % frames_per_combined, "frames from the end of "
             "the range will be left off.")
        toidx = toidx - (total_frames % frames_per_combined)
        total_frames = toidx - fromidx

    total_combined_frames = int(total_frames / frames_per_combined)
    info("Output data cube will have", total_combined_frames, "total frames of", newexposure, "sec exposure")
    info("Processing input data cube frames", fromidx, "to", toidx)
    target_dir = tempfile.mkdtemp()
    info("Created working directory {0} for intermediate data".format(target_dir))

    try:
        range_pairs = list((
            fromidx + n * frames_per_combined,
            fromidx + (n+1) * frames_per_combined - 1
        ) for n in range(0, total_combined_frames))
        
        frame_paths = combine_cube_frames(cubefile, range_pairs, target_dir)
        # frames_to_cube(frame_paths, outfile)
    finally:
        # Don't leave clutter if the task fails
        # shutil.rmtree(target_dir)
        info("Removed working directory {0}".format(target_dir))

# def frames_to_cube(frames, outfile):
    

parfile = iraf.osfn("aotools$aoavgcube.par")
t = iraf.IrafTaskFactory(taskname="aoavgcube", value=parfile, function=aoavgcube)
Example #42
    parser.add_argument('--verbose',
                        action='store_true',
                        help='Write to a log file?')
    parser.add_argument('--logfile',
                        '-l',
                        help='Name of ascii log file',
                        default='kepcotrend.log',
                        dest='logfile',
                        type=str)
    parser.add_argument('--status',
                        '-e',
                        help='Exit status (0=good)',
                        default=0,
                        dest='status',
                        type=int)

    args = parser.parse_args()

    cmdLine = True

    keptrial(args.infile, args.outfile, args.datacol, args.errcol, args.fmin,
             args.fmax, args.nfreq, args.method, args.ntrials, args.plot,
             args.clobber, args.verbose, args.logfile, args.status, cmdLine)

else:
    from pyraf import iraf
    parfile = iraf.osfn("kepler$keptrial.par")
    t = iraf.IrafTaskFactory(taskname="keptrial",
                             value=parfile,
                             function=keptrial)
Example #43
            x = numpy.arange(xs)
            rdata = data[i, :]
            rmask = mask[i, :]
            rmask = nd.minimum_filter(rmask, size=3)
            if rmask.any() == True:
                rdata = numpy.interp(x, x[rmask], rdata[rmask])
                data[i, rmask == 0] = rdata[rmask == 0]

    return data


def tran_func(a, xshift, yshift, xmag, ymag, xrot, yrot):
    xtran = ymag * a[0] * cos(yrot * pi / 180.0) \
        - xmag * a[1] * sin(xrot * pi / 180) \
        - yshift
    ytran = ymag * a[0] * sin(yrot * pi / 180.0) \
        + xmag * a[1] * cos(xrot * pi / 180) \
        - xshift
    return xtran, ytran
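
# Hedged usage sketch: with zero rotation, zero shift and unit magnification the
# transform reduces to (ymag*a[0], xmag*a[1]), e.g.
#
#   tran_func((100.0, 200.0), xshift=0.0, yshift=0.0,
#             xmag=1.0, ymag=1.0, xrot=0.0, yrot=0.0)   # -> (100.0, 200.0)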


# -----------------------------------------------------------
# main code
if not iraf.deftask('saltmosaic'):
    parfile = iraf.osfn("saltred$saltmosaic.par")
    t = iraf.IrafTaskFactory(
        taskname="saltmosaic",
        value=parfile,
        function=saltmosaic,
        pkgname='saltred')
Example #44
0
def flexure_rssspec(imagefits, fitslist, option=""):

    print str(datetime.now())

    if option == "filesave": prefix = raw_input("\nFile prefix: ")
    pixel = 15.  # pixel size in microns
    pix_scale = 0.125
    sexparams = ["X_IMAGE","Y_IMAGE","FLUX_ISO","FLUX_MAX","FLAGS","CLASS_STAR",    \
                "X2WIN_IMAGE","Y2WIN_IMAGE","XYWIN_IMAGE","ERRX2WIN_IMAGE"]
    np.savetxt("qred_thrufoc.param", sexparams, fmt="%s")
    fmaxcol, flagcol, xvarcol, yvarcol, xerrcol = (
        3, 4, 6, 7, 9)  # column nos (from 0) of data in sextractor
    imagestooclosefactor = 3.0  # too close if factor*sep < sqrt(var)
    gaptooclose = 1.25  # arcsec
    edgetooclose = 1.25  # arcsec
    rattolerance = 0.25
    toofaint = 250.  # FMAX counts
    galaxydelta = 0.4  # arcsec
    MOSimagelimit = 1.  # arcsec
    deblend = .005  # default
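
    # Apparent index-suffix convention used below: _m mask hole, _R/_C mask
    # row/column, _b histogram bin, _s detected spot, _S identified spot,
    # _l/_L lamp line, _f flexure exposure.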

    imagehdr = pyfits.getheader(imagefits)
    if imagehdr["GR-STATE"][1] == "4":
        print "First fits file " + imagefits + " is not image of mask"
        exit()

    flexposns = len(fitslist)
    obsdict = obslog(fitslist)

    image_f = [fitslist[fpos].split(".")[0][-12:] for fpos in range(flexposns)]
    dateobs = obsdict["DATE-OBS"][0].replace("-", "")
    if int(dateobs) > 20110928:
        rho_f = np.array(obsdict["TRKRHO"]).astype(float)
    else:
        rho_f = np.array(obsdict["TELRHO"]).astype(float)

    catpos = np.argmin(np.abs(rho_f))
    cbin, rbin = np.array(obsdict["CCDSUM"][catpos].split(" ")).astype(int)
    maskid = obsdict["MASKID"][catpos].strip()
    filter = obsdict["FILTER"][catpos].strip()
    grating = obsdict["GRATING"][catpos].strip()
    rows, cols = pyfits.getdata(fitslist[catpos]).shape
    isspec = (obsdict["GR-STATE"][catpos][1] == "4")
    if not isspec:
        print "Use flexure_rssimage for image flexure analysis"
        exit()
    grang = float(obsdict["GRTILT"][catpos])
    artic = float(obsdict["CAMANG"][catpos])
    lamp = obsdict["LAMPID"][catpos].strip()

    print "\nMask:           ", maskid
    print "Filter:         ", filter
    print "Grating:        ", grating
    print "Artic (deg):    ", artic
    print "Gr Angle (deg): ", grang
    print "Lamp:           ", lamp

    #   map the mask spots _m using the imaging fits file
    sex_js = sextract(imagefits, deblend=deblend)
    flux_s = sex_js[2]
    fluxmedian = np.median(np.sort(flux_s)[-10:])
    okm_s = (flux_s > fluxmedian / 10)  # cull bogus spots
    maskholes = okm_s.sum()
    r_m = sex_js[1, okm_s]
    c_m = sex_js[0, okm_s]

    #   find mask rows _R, tabulate
    histr_b, binr_b = np.histogram(r_m, bins=rows / 10, range=(0, rows))
    bin0_R = np.where((histr_b[1:] > 0) & (histr_b[:-1] == 0))[0]
    bin1_R = np.where((histr_b[1:] == 0) & (histr_b[:-1] > 0))[0]
    maskRows = bin0_R.shape[0]
    bin_m = np.digitize(r_m, binr_b) - 1
    R_m = np.array([np.where((bin_m[m] >= bin0_R) & (bin_m[m] <= bin1_R))[0][0] \
                    for m in range(maskholes)])

    #   find mask cols _C, tabulate
    histc_b, binc_b = np.histogram(c_m, bins=cols / 10, range=(0, cols))
    bin0_C = np.where((histc_b[1:] > 0) & (histc_b[:-1] == 0))[0]
    bin1_C = np.where((histc_b[1:] == 0) & (histc_b[:-1] > 0))[0]
    maskCols = bin0_C.shape[0]
    bin_m = np.digitize(c_m, binc_b) - 1
    C_m = np.array([np.where((bin_m[m] >= bin0_C) & (bin_m[m] <= bin1_C))[0][0] \
                    for m in range(maskholes)])

    #   identify mask center = optical axis
    if maskid == 'P000000N99':  # symmetric mask
        Raxis = maskRows / 2
        Caxis = maskCols / 2
    elif maskid == 'P000000N03':  # mask with centered cross
        Raxis = np.where((np.argmax(histr_b) >= bin0_R)
                         & (np.argmax(histr_b) <= bin1_R))[0][0]
        Caxis = np.where((np.argmax(histc_b) >= bin0_C)
                         & (np.argmax(histc_b) <= bin1_C))[0][0]
    else:
        print "Not a valid flexure mask"
        exit()
    maxis = np.where((R_m == Raxis) & (C_m == Caxis))[0][0]
    raxis = r_m[maxis]
    caxis = c_m[maxis]

    print "\nMask_Holes Rows Cols  r axis   c axis \n                      pixels   pixels"
    print "  %5i %5i %5i %8.1f %8.1f" % (maskholes, maskRows, maskCols,
                                         raxis * rbin, caxis * cbin)

    #    np.savetxt(dateobs+'_'+"mask.txt",np.vstack((r_m,c_m,sex_js[2,okm_s],R_m)).T,fmt="%10.2f")

    #   get linelist, predict spots in spectral image
    wavcent = rsslam(grating, grang, artic, 0., dateobs)
    specfile = datedfile(datadir + "spectrograph/spec_yyyymmdd.txt", dateobs)
    FCampoly = np.loadtxt(specfile, usecols=(1, ))[5:11]
    fcam = np.polyval(FCampoly, (wavcent / 1000. - 4.))
    lampfile = iraf.osfn("pysalt$data/linelists/" + lamp + ".salt")
    wav_l, int_l = np.loadtxt(lampfile, unpack=True)

    maxdalpha = -np.degrees((cols / 2) * cbin * pixel / (1000. * fcam))
    maxgamma = np.degrees((rows / 2) * rbin * pixel / (1000. * fcam))
    maxwav = rsslam(grating, grang, artic, cols * cbin / 2, dateobs,
                    -maxdalpha, 0)
    minwav = rsslam(grating, grang, artic, -cols * cbin / 2, dateobs,
                    maxdalpha, maxgamma)
    ok_l = (wav_l >= minwav) & (wav_l <= maxwav)
    wav_l = wav_l[ok_l]
    int_l = int_l[ok_l]
    lines = wav_l.shape[0]
    col_ml = np.zeros((maskholes, lines))
    dcol_c = np.arange(-(cols * cbin / 2), (cols * cbin / 2))
    for m in range(maskholes):
        dalpha = -np.degrees((c_m[m] - caxis) * cbin * pixel / (1000. * fcam))
        gamma = np.degrees((r_m[m] - raxis) * rbin * pixel / (1000. * fcam))
        wav0, wav1 = rsslam(grating,
                            grang,
                            artic,
                            dcol_c[[0, -1]],
                            dateobs,
                            dalpha,
                            gamma=gamma)
        ok_l = ((wav_l > wav0) & (wav_l < wav1))
        colwav = interp1d(rsslam(grating,grang,artic,dcol_c,    \
                dateobs,dalpha=dalpha,gamma=gamma), dcol_c)
        col_ml[m, ok_l] = colwav(wav_l[ok_l]) + caxis * cbin

#    np.savetxt(dateobs+"_col_ml.txt",np.vstack((R_m,C_m,col_ml.T)),fmt="%8.1f")

#   identify mask hole and wavelength for spots in spec image closest to rho=0
    os.remove("sexwt.fits")
    sex_js = sextract(fitslist[catpos], "", deblend=deblend)
    r_s = sex_js[1]
    c_s = sex_js[0]
    flux_s = sex_js[2]
    spots = r_s.shape[0]
    fluxmedian = np.median(np.sort(sex_js[2])[-10:])
    ok_s = (flux_s > fluxmedian / 30)  # cull bogus spots

    #   find spectral bin rows RR in candidates R0, cull non-spectra
    histr_b, binr_b = np.histogram(r_s[ok_s], bins=rows / 10, range=(0, rows))
    histr_b[[0, -1]] = 0
    bin0_R0 = np.where((histr_b[1:] > 0) & (histr_b[:-1] == 0))[0] + 1
    bin1_R0 = np.where((histr_b[1:] == 0) & (histr_b[:-1] > 0))[0]
    bin_s = np.digitize(r_s, binr_b) - 1

    maxcount_R0 = np.array([(histr_b[bin0_R0[R0]:bin1_R0[R0]+1]).max() \
                        for R0 in range(bin0_R0.shape[0])])
    ok_R0 = (maxcount_R0 > 3)
    specrows = ok_R0.sum()  # cull down to spectra RR
    bin0_RR = bin0_R0[ok_R0]
    bin1_RR = bin1_R0[ok_R0]
    ok_s &= ((bin_s >= bin0_RR[:, None]) &
             (bin_s <= bin1_RR[:, None])).any(axis=0)
    RR_s = -np.ones(spots)
    r_RR = np.zeros(specrows)
    for RR in range(specrows):
        isRR_s = ok_s & np.in1d(bin_s, np.arange(bin0_RR[RR], bin1_RR[RR] + 1))
        RR_s[isRR_s] = RR
        r_RR[RR] = r_s[isRR_s].mean()
    count_RR = (RR_s[:, None] == range(specrows)).sum(axis=0)

    if maskid == 'P000000N99':
        RRaxis = np.argmin((raxis - r_RR)**2)
    elif maskid == 'P000000N03':
        RRaxis = np.argmax(count_RR)

#   cull weak lines
    ptile = 100. * min(
        1., 5. * maskCols / count_RR.max())  # want like 5 brightest lines
    for RR in range(specrows):
        isRR_s = ok_s & np.in1d(bin_s, np.arange(bin0_RR[RR], bin1_RR[RR] + 1))
        fluxmin = np.percentile(sex_js[2, isRR_s], 100. - ptile)
        ok_s[isRR_s] &= (sex_js[2, isRR_s] > fluxmin)

#   identify with mask rows R (assuming no gaps)
    RR_m = R_m + RRaxis - Raxis

    #   find approximate grating shift in dispersion direction by looking for most common id error
    histc_b = np.zeros(60)
    for RR in range(specrows):
        isRR_s = ((RR_s == RR) & ok_s)
        cerr_MS = (c_s[None, isRR_s] - col_ml[RR_m == RR].ravel()[:, None])
        histc_b += np.histogram(cerr_MS.ravel(), bins=60, range=(-150, 150))[0]
    cshift = 5 * np.argmax(histc_b) - 150
    col_ml += cshift

    #   identify wavelength and mask column with spots in each spectrum
    isfound_s = np.zeros((spots), dtype=bool)
    bintol = 16 / cbin  # 2 arcsec tolerance for line ID
    R_s = -np.ones(spots, dtype=int)
    C_s = -np.ones(spots, dtype=int)
    l_s = -np.ones(spots, dtype=int)
    m_s = -np.ones(spots, dtype=int)
    cerr_s = np.zeros(spots)
    rmscol = 0.

    for RR in range(specrows):  # _S spot in spectrum, _P (mask column, line)
        isRR_m = (RR_m == RR)
        isRR_s = ((RR_s == RR) & ok_s)
        cerr_PS = (c_s[None, isRR_s] - col_ml[isRR_m].ravel()[:, None])
        Spots = isRR_s.sum()
        Possibles = col_ml[isRR_m].size
        Cols = Possibles / lines
        P_S = np.argmin(np.abs(cerr_PS), axis=0)
        cerr_S = cerr_PS[P_S, range(isRR_s.sum())]
        isfound_S = (np.abs(cerr_S) < bintol)
        M_P, l_P = np.unravel_index(np.arange(Possibles), (Cols, lines))
        m_P = np.where(isRR_m)[0][M_P]
        m_S = m_P[P_S]
        C_P = C_m[m_P]
        C_S = C_P[P_S]
        l_S = l_P[P_S]
        s_S = np.where(isRR_s)[0]
        R_s[isRR_s] = RR + Raxis - RRaxis
        cerr_s[s_S] = cerr_S
        C_s[s_S[isfound_S]] = C_S[isfound_S]
        l_s[s_S[isfound_S]] = l_S[isfound_S]
        m_s[s_S[isfound_S]] = m_S[isfound_S]
        isfound_s[s_S] |= isfound_S
        rmscol += (cerr_S[isfound_S]**2).sum()

#   cull wavelengths to _L with < 1/2 Mask Rows or Cols
    ok_s &= isfound_s
    ok_l = np.zeros((lines), dtype=bool)
    for line in range(lines):
        lRows = np.unique(R_s[l_s == line]).shape[0]
        lCols = np.unique(C_s[l_s == line]).shape[0]
        ok_l[line] = ((lRows >= maskRows / 2) & (lCols >= maskCols / 2))
    l_L = np.where(ok_l)[0]
    wav_L = wav_l[l_L]
    Lines = l_L.shape[0]

    ok_s &= np.in1d(l_s, l_L)

    #   tabulate good catalog spots (final _S)
    s_S = np.where(ok_s)[0]
    r_S = r_s[s_S]
    c_S = c_s[s_S]
    cerr_S = cerr_s[s_S]
    R_S = R_s[s_S]
    C_S = C_s[s_S]
    l_S = l_s[s_S]
    Spots = ok_s.sum()

    rshift = r_S[R_S == Raxis].mean() - raxis
    cshift += (c_S - col_ml[m_s[s_S], l_S]).mean()
    rmscol = np.sqrt(rmscol / Spots)

    np.savetxt("cat_S.txt",np.vstack((s_S,r_S,c_S,R_S,C_S,l_S,cerr_S)).T,  \
        fmt="%5i %8.2f %8.2f %5i %5i %5i %8.2f")

    print "\nSpec_Spots Lines rshift  cshift      rms\n                pixels   pixels   pixels"
    print "  %5i %5i %8.1f %8.1f %8.1f" % (Spots, np.unique(l_S).shape[0],
                                           rshift, cshift, rmscol)
    print "\nLineno   Wavel   spots  Rows  Cols"
    for L in range(Lines):
        line = l_L[L]
        lRows = np.unique(R_S[l_S == line]).shape[0]
        lCols = np.unique(C_S[l_S == line]).shape[0]
        lspots = (l_S == line).sum()
        print " %5i %8.2f %5i %5i %5i" % (line, wav_l[line], lspots, lRows,
                                          lCols)

    sexcols = sex_js.shape[0]
    sexdata_jfS = np.zeros((sexcols, flexposns, Spots))
    sexdata_jfS[:, catpos] = sex_js[:, ok_s]
    xcenter_L = col_ml[maxis, l_L]
    ycenter = raxis + rshift
    if option == "filesave":
        np.savetxt(prefix+"Spots.txt",sexdata_jfS[:,catpos].T,   \
            fmt=2*"%9.2f "+"%9.0f "+"%9.1f "+"%4i "+"%6.2f "+3*"%7.2f "+"%11.3e")

#   find spots in flexure series, in order of increasing abs(rho), and store sextractor output

    row_fLd = np.zeros((flexposns, Lines, 2))
    col_fLd = np.zeros((flexposns, Lines, 2))

    print "\n     fits      rho  line  spots  rshift  cshift  rslope  cslope  rmserr "
    print "               deg   Ang         arcsec  arcsec  arcmin  arcmin   bins"

    for dirn in (1, -1):
        refpos = catpos
        posdirlist = np.argsort(dirn * rho_f)
        poslist = posdirlist[dirn * rho_f[posdirlist] > rho_f[refpos]]

        for fpos in poslist:
            col_S, row_S = sexdata_jfS[0:2, refpos, :]
            sex_js = sextract(fitslist[fpos], "sexwt.fits", deblend=deblend)

            binsqerr_sS = (sex_js[1, :, None] - row_S[None, :])**2 + (
                sex_js[0, :, None] - col_S[None, :])**2
            S_s = np.argmin(binsqerr_sS, axis=1)
            # First compute image shift by averaging small errors
            rowerr_s = sex_js[1] - row_S[S_s]
            colerr_s = sex_js[0] - col_S[S_s]
            hist_r, bin_r = np.histogram(rowerr_s,
                                         bins=32,
                                         range=(-2 * bintol, 2 * bintol))
            drow = rowerr_s[(rowerr_s > bin_r[np.argmax(hist_r)]-bintol) & \
                (rowerr_s < bin_r[np.argmax(hist_r)]+bintol)].mean()
            hist_c, bin_c = np.histogram(colerr_s,
                                         bins=32,
                                         range=(-2 * bintol, 2 * bintol))
            dcol = colerr_s[(colerr_s > bin_c[np.argmax(hist_c)]-bintol) & \
                (colerr_s < bin_c[np.argmax(hist_c)]+bintol)].mean()
            # Now refind the closest ID
            binsqerr_sS = (sex_js[1,:,None] - row_S[None,:] -drow)**2 + \
                (sex_js[0,:,None] - col_S[None,:] -dcol)**2
            binsqerr_s = binsqerr_sS.min(axis=1)
            isfound_s = binsqerr_s < bintol**2
            S_s = np.argmin(binsqerr_sS, axis=1)
            isfound_s &= (binsqerr_s == binsqerr_sS[:, S_s].min(axis=0))
            isfound_S = np.array([S in S_s[isfound_s] for S in range(Spots)])
            sexdata_jfS[:, fpos, S_s[isfound_s]] = sex_js[:, isfound_s]
            drow_S = sexdata_jfS[1, fpos] - sexdata_jfS[1, catpos]
            dcol_S = sexdata_jfS[0, fpos] - sexdata_jfS[0, catpos]

            #            np.savetxt("motion_"+str(fpos)+".txt",np.vstack((isfound_S,l_S,drow_S,dcol_S)).T,fmt="%3i %3i %8.2f %8.2f")

            # Compute flexure image motion parameters for each line
            for L in range(Lines):
                ok_S = ((l_S == l_L[L]) & isfound_S)
                row_fLd[fpos,L],rowchi,d,d,d = \
                  np.polyfit(sexdata_jfS[0,catpos,ok_S]-xcenter_L[L],drow_S[ok_S],deg=1,full=True)
                col_fLd[fpos,L],colchi,d,d,d = \
                  np.polyfit(sexdata_jfS[1,catpos,ok_S]-ycenter,dcol_S[ok_S],deg=1,full=True)
                rms = np.sqrt((rowchi + colchi) / (2 * ok_S.sum()))

                print ("%12s %5.0f %5i %5i "+5*"%7.2f ") % (image_f[fpos], rho_f[fpos], wav_L[L],  \
                    ok_S.sum(),row_fLd[fpos,L,1]*rbin*pix_scale, col_fLd[fpos,L,1]*cbin*pix_scale, \
                    60.*np.degrees(row_fLd[fpos,L,0]),-60.*np.degrees(col_fLd[fpos,L,0]), rms)
                if option == "filesave":
                    np.savetxt(prefix+"flex_"+str(fpos)+".txt",np.vstack((isfound_S,drow_S,dcol_S)).T,  \
                        fmt = "%2i %8.3f %8.3f")
                    np.savetxt(prefix + "sextr_" + str(fpos) + ".txt",
                               sexdata_jfS[:, fpos].T)
            print

#   make plots

    fig, plot_s = plt.subplots(2, 1, sharex=True)

    plt.xlabel('Rho (deg)')
    plt.xlim(-120, 120)
    plt.xticks(range(-120, 120, 30))
    fig.set_size_inches((8.5, 11))
    fig.subplots_adjust(left=0.175)

    plot_s[0].set_title(
        str(dateobs) + [" Imaging", " Spectral"][isspec] + " Flexure")
    plot_s[0].set_ylabel('Mean Position (arcsec)')
    plot_s[0].set_ylim(-0.5, 4.)
    plot_s[1].set_ylabel('Rotation (arcmin ccw)')
    plot_s[1].set_ylim(-10., 6.)

    lbl_L = [("%5.0f") % (wav_L[L]) for L in range(Lines)]
    color_L = 'bgrcmykw'
    for L in range(Lines):
        plot_s[0].plot(rho_f,row_fLd[:,L,1]*rbin*pix_scale,   \
              color=color_L[L],marker='D',markersize=8,label='row '+lbl_L[L])
        plot_s[1].plot(rho_f,
                       60. * np.degrees(row_fLd[:, L, 0]),
                       color=color_L[L],
                       marker='D',
                       markersize=8,
                       label='row ' + lbl_L[L])
    collbl = 'col' + lbl_L[0]
    for L in range(Lines):
        plot_s[0].plot(rho_f,col_fLd[:,L,1]*cbin*pix_scale,   \
              color=color_L[L],marker='s',markersize=8,label=collbl)
        plot_s[1].plot(rho_f,-60.*np.degrees(col_fLd[:,L,0]), \
              color=color_L[L],marker='s',markersize=8,label=collbl)
        collbl = ''
    plot_s[0].legend(fontsize='medium', loc='upper center')
    plotfile = str(dateobs) + ['_imflex.pdf', '_grflex.pdf'][isspec]
    plt.savefig(plotfile, orientation='portrait')

    if os.name == 'posix':
        if os.popen('ps -C evince -f').read().count(plotfile) == 0:
            os.system('evince ' + plotfile + ' &')

    os.remove("out.txt")
    os.remove("qred_thrufoc.param")
    os.remove("sexwt.fits")
    return
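
# A minimal sketch (synthetic counts, reusing the module-level numpy import) of
# the histogram trick used above to locate runs of contiguous non-empty bins
# (mask rows, mask columns, candidate spectra): bin0 marks run starts, bin1 ends.
_hist = np.array([0, 0, 3, 5, 0, 0, 2, 4, 1, 0])
_bin0 = np.where((_hist[1:] > 0) & (_hist[:-1] == 0))[0] + 1   # [2, 6]
_bin1 = np.where((_hist[1:] == 0) & (_hist[:-1] > 0))[0]       # [3, 8]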
Example #45
0
                        type=str)
    parser.add_argument('--verbose',
                        action='store_true',
                        help='Write to a log file?')
    parser.add_argument('--logfile',
                        '-l',
                        help='Name of ascii log file',
                        default='kepcotrend.log',
                        dest='logfile',
                        type=str)
    parser.add_argument('--status',
                        '-e',
                        help='Exit status (0=good)',
                        default=0,
                        dest='status',
                        type=int)

    args = parser.parse_args()
    cmdLine = True

    kepmask(args.infile, args.maskfile, args.plotfile, args.tabrow, args.imin,
            args.imax, args.iscale, args.cmap, args.verbose, args.logfile,
            args.status, cmdLine)

else:
    from pyraf import iraf
    parfile = iraf.osfn("kepler$kepmask.par")
    t = iraf.IrafTaskFactory(taskname="kepmask",
                             value=parfile,
                             function=kepmask)
Example #46
0
    kepmsg.clock(message,logfile,verbose)

# main
if '--shell' in sys.argv:
    import argparse
    
    parser = argparse.ArgumentParser(description='Append multiple month short cadence and/or multiple quarter long cadence data')
    parser.add_argument('--shell', action='store_true', help='Are we running from the shell?')
    parser.add_argument('infiles', help='List of input files', type=str)

    parser.add_argument('outfile', help='Name of FITS file to output', type=str)


    parser.add_argument('--clobber', action='store_true', help='Overwrite output file?')
    parser.add_argument('--verbose', action='store_true', help='Write to a log file?')
    parser.add_argument('--logfile', '-l', help='Name of ascii log file', default='kepcotrend.log', dest='logfile', type=str)
    parser.add_argument('--status', '-e', help='Exit status (0=good)', default=0, dest='status', type=int)


    args = parser.parse_args()
    
    

    kepstitch(args.infiles,args.outfile,args.clobber,args.verbose,args.logfile,args.status)
    

else:
    from pyraf import iraf
    parfile = iraf.osfn("kepler$kepstitch.par")
    t = iraf.IrafTaskFactory(taskname="kepstitch", value=parfile, function=kepstitch)
Example #47
0
              x1=0
              y2=y2
              x2=x2
           else:
              y1=0
              x1=x2
              x2=x1+x2
        elif detector=='hrdet':
           y1=0
           if i%2==1: x1=0
           if i%2==0: 
              x1=x2
              x2=x1+x2
           if i>2:
              y1=y2
              y2=y1+y2 
              
              
        data[y1:y2,x1:x2]=hdu[i].data
    
    ihdu = pyfits.ImageHDU(data)
    nhdu = pyfits.HDUList([hdu[0], ihdu])

    return nhdu


if not iraf.deftask('hrsstack'):
    parfile = iraf.osfn("salthrs$hrsstack.par")
    t = iraf.IrafTaskFactory(taskname="hrsstack",value=parfile,function=hrsstack, pkgname='salthrs')

Example #48
0
    msg += '#gratilt=%s\n' % grasteps
    msg += '#arang=%s\n' % arang
    msg += '#artilt=%s\n' % arsteps
    msg += '#filter=%s\n' % rfilter.strip()
    if objid:
        msg += '#slitid=%s\n' % objid
    msg += '#Function=%s\n' % function
    msg += '#Order=%s\n' % order
    msg += '#Starting Data\n'
    dout.write(msg)

    for i in range(len(ws_arr)):
        if ws_arr[i, 0]:
            msg = '%5.2f ' % ws_arr[i, 0]
            msg += ' '.join(['%e' % k for k in ws_arr[i, 1:]])
            dout.write(msg + '\n')
    dout.write('\n')
    dout.close()

    return
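
# Sketch of the ASCII record layout written above (hypothetical numbers): a
# commented header followed by one line per non-zero row of ws_arr, e.g.
#
#   #Function=polynomial
#   #Order=3
#   #Starting Data
#   1023.00 4.500000e+03 1.000000e+00 -2.000000e-06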


# main code

parfile = iraf.osfn("saltspec$specarcstraighten.par")
t = iraf.IrafTaskFactory(
    taskname="specarcstraighten",
    value=parfile,
    function=specarcstraighten,
    pkgname='saltspec')
Example #49
0
from pyraf.iraf import pysalt
import os, string, sys, glob, pyfits, time

from PyQt4 import QtCore, QtGui
from pySlitMask import SlitMaskGui
from saltsafelog import logging, history

from salterror import SaltError    

debug=True


# -----------------------------------------------------------
# core routine

def masktool(catalog='', image='', logfile='salt.log', verbose=True):

   with logging(logfile,debug) as log:
       app = QtGui.QApplication([])
       myapp = SlitMaskGui(infile=catalog, inimage=image)
       myapp.show()
       app.exec_()
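
# Hedged usage sketch (hypothetical file names): launch the slit mask GUI on a
# target catalogue and a reference image.
#
#   masktool(catalog='targets.cat', image='mosaic.fits', logfile='salt.log')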

         

# -----------------------------------------------------------
# main code

parfile = iraf.osfn("proptools$masktool.par")
t = iraf.IrafTaskFactory(taskname="masktool",value=parfile,function=masktool, pkgname='proptools')
Example #50
0
    msg += '#grating=%s\n' % grating.strip()
    msg += '#graang=%s\n' % grang
    msg += '#gratilt=%s\n' % grasteps
    msg += '#arang=%s\n' % arang
    msg += '#artilt=%s\n' % arsteps
    msg += '#filter=%s\n' % rfilter.strip()
    if objid:
        msg += '#slitid=%s\n' % objid
    msg += '#Function=%s\n' % function
    msg += '#Order=%s\n' % order
    msg += '#Starting Data\n'
    dout.write(msg)

    for i in range(len(ws_arr)):
        if ws_arr[i, 0]:
            msg = '%5.2f ' % ws_arr[i, 0]
            msg += ' '.join(['%e' % k for k in ws_arr[i, 1:]])
            dout.write(msg + '\n')
    dout.write('\n')
    dout.close()

    return


# main code

if not iraf.deftask('specidentify'):
    parfile = iraf.osfn("saltspec$specidentify.par")
    t = iraf.IrafTaskFactory(
        taskname="specidentify", value=parfile, function=specidentify, pkgname='saltspec')
Example #51
0
def createbadpixel(inhdu, bphdu, sci_ext, bp_ext):
   """Create the bad pixel hdu from a bad pixel hdu"""
   if bphdu is None:
       data=inhdu[sci_ext].data*0.0
   else:
       infile=inhdu._HDUList__file.name
       bpfile=bphdu._HDUList__file.name
       
       if not saltkey.compare(inhdu[0], bphdu[0], 'INSTRUME', infile, bpfile):
           message = '%s and %s are not the same %s' % (infile,bpfile, 'INSTRUME')
           raise SaltError(message)
       for k in ['CCDSUM', 'NAXIS1', 'NAXIS2']:
           if not saltkey.compare(inhdu[sci_ext], bphdu[sci_ext], k, infile, bpfile):
                  message = '%s and %s are not the same %s' % (infile,bpfile, k)
                  raise SaltError(message)
       data=bphdu[sci_ext].data.copy()

   header=inhdu[sci_ext].header.copy()
   header['EXTVER']=bp_ext
   header.update('SCIEXT',sci_ext,comment='Extension of science frame')
   return pyfits.ImageHDU(data=data, header=header, name='BPM')
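
# A hedged usage sketch (hypothetical file names): append a bad pixel mask
# extension for science extension 1, checking it against the input frame.
#
#   inhdu = pyfits.open('mbxgpP201201010001.fits')
#   bphdu = pyfits.open('RSSbadpix.fits')
#   inhdu.append(createbadpixel(inhdu, bphdu, sci_ext=1, bp_ext=2))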



# -----------------------------------------------------------
# main code
if not iraf.deftask('saltprepare'):
   parfile = iraf.osfn("saltred$saltprepare.par")
   t = iraf.IrafTaskFactory(taskname="saltprepare",value=parfile,function=saltprepare, pkgname='saltred')
Example #52
0
    parser.add_argument('outfile', help='Name of FITS file to output', type=str)
    parser.add_argument('--period', help='Period to fold data upon [days]', type=float)
    parser.add_argument('--bjd0', help='time of zero phase for the folded period [BJD]', type=float) 
    parser.add_argument('--bindata', action='store_true', help='Bin output data?')
    parser.add_argument('--binmethod', default='mean', help='Binning method', type=str, choices=['mean','median','sigclip'])
    parser.add_argument('--threshold', default=1.0, help='Sigma clipping threshold [sigma]', type=float)
    parser.add_argument('--niter', default=5, help='Number of sigma clipping iterations before giving up', type=int)
    parser.add_argument('--nbins', default=1000, help='Number of period bins', type=int)
    parser.add_argument('--quality', action='store_true', help='Reject bad quality timestamps?')
    parser.add_argument('--plottype', default='sap', help='plot type', type=str, choices=['sap','pdc','cbv','det','none'])
    parser.add_argument('--plotlab', default='e$^-$ s$^{-1}$', help='Plot axis label', type=str)
    parser.add_argument('--clobber', action='store_true', help='Overwrite output file?')
    parser.add_argument('--verbose', action='store_true', help='Write to a log file?')
    parser.add_argument('--logfile', '-l', help='Name of ascii log file', default='kepcotrend.log', dest='logfile', type=str)
    parser.add_argument('--status', '-e', help='Exit status (0=good)', default=0, dest='status', type=int)

    args = parser.parse_args()
    
    cmdLine=True

    kepfold(args.infile,args.outfile,args.period,args.bjd0,args.bindata,args.binmethod,args.threshold,
            args.niter,args.nbins,args.quality,args.plottype,args.plotlab,args.clobber,args.verbose,
            args.logfile,args.status,cmdLine)
    

else:
    from pyraf import iraf
    parfile = iraf.osfn("kepler$kepfold.par")
    t = iraf.IrafTaskFactory(taskname="kepfold", value=parfile, function=kepfold)

Example #53
0
def imred(rawdir, prodir, cleanup=True):
    print rawdir
    print prodir

    #get the name of the files
    infile_list=glob.glob(rawdir+'*.fits')
    infiles=','.join(['%s' % x for x in infile_list])
    

    #get the current date for the files
    obsdate=os.path.basename(infile_list[0])[1:9]
    print obsdate

    #set up some files that will be needed
    logfile='imred'+obsdate+'.log'
    flatimage='FLAT%s.fits' % (obsdate)
    dbfile='spec%s.db' % obsdate

    #create the observation log
    obs_dict=obslog(infile_list)

 
    #prepare the data
    saltprepare(infiles, '', 'p', createvar=False, badpixelimage='', clobber=True, logfile=logfile, verbose=True)

    #bias subtract the data
    saltbias('pP*fits', '', 'b', subover=True, trim=True, subbias=False, masterbias='',  
              median=False, function='polynomial', order=5, rej_lo=3.0, rej_hi=5.0, 
              niter=10, plotover=False, turbo=False, 
              clobber=True, logfile=logfile, verbose=True)

    #gain correct the data
    saltgain('bpP*fits', '', 'g', usedb=False, mult=True, clobber=True, logfile=logfile, verbose=True)

    #cross talk correct the data
    saltxtalk('gbpP*fits', '', 'x', xtalkfile = "", usedb=False, clobber=True, logfile=logfile, verbose=True)

    #cosmic ray clean the data
    #only clean the object data
    for i in range(len(infile_list)):
      if obs_dict['CCDTYPE'][i].count('OBJECT') and obs_dict['INSTRUME'][i].count('RSS'):
          img='xgbp'+os.path.basename(infile_list[i])
          saltcrclean(img, img, '', crtype='edge', thresh=5, mbox=11, bthresh=5.0,
                flux_ratio=0.2, bbox=25, gain=1.0, rdnoise=5.0, fthresh=5.0, bfactor=2,
                gbox=3, maxiter=5, multithread=True,  clobber=True, logfile=logfile, verbose=True)
 
    #flat field correct the data
    flat_imgs=''
    for i in range(len(infile_list)):
        if obs_dict['CCDTYPE'][i].count('FLAT'):
           if flat_imgs: flat_imgs += ','
           flat_imgs += 'xgbp'+os.path.basename(infile_list[i])

    if len(flat_imgs)!=0:
         saltcombine(flat_imgs,flatimage, method='median', reject=None, mask=False,    \
                weight=True, blank=0, scale='average', statsec='[200:300, 600:800]', lthresh=3,    \
                hthresh=3, clobber=True, logfile=logfile, verbose=True)
         #saltillum(flatimage, flatimage, '', mbox=11, clobber=True, logfile=logfile, verbose=True)

         saltflat('xgbpP*fits', '', 'f', flatimage, minflat=500, clobber=True, logfile=logfile, verbose=True)
    else:
         flats=None
         imfiles=glob.glob('xgbpP*fits')
         for f in imfiles:
             shutil.copy(f, 'f'+f)

    #mosaic the data
    geomfile=iraf.osfn("pysalt$data/rss/RSSgeom.dat")
    saltmosaic('fxgbpP*fits', '', 'm', geomfile, interp='linear', cleanup=True, geotran=True, clobber=True, logfile=logfile, verbose=True)

    #clean up the images
    if cleanup:
           for f in glob.glob('p*fits'): os.remove(f)
           for f in glob.glob('bp*fits'): os.remove(f)
           for f in glob.glob('gbp*fits'): os.remove(f)
           for f in glob.glob('xgbp*fits'): os.remove(f)
           for f in glob.glob('fxgbp*fits'): os.remove(f)
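
# Hedged usage note (hypothetical paths): each PySALT step above prepends a
# prefix to the frame names, so files progress p -> bp -> gbp -> xgbp -> fxgbp
# -> mfxgbp as they move through the reduction.
#
#   imred('/salt/rss/raw/', '/salt/rss/product/', cleanup=True)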
Example #54
0
    j=sol[i:].index('\n')
    return i+j+1
    

def findwskeyword(keyword, sol):
    """Find and return a value for a keyword in the list of the wavelength solution"""
    i=sol.index(keyword)
    j=sol[i:].index('\n')
    return sol[i:i+j].split('=')[1].strip()

def enterdatetime(dstr):
    """Break up the datetime string to create a datetime object
       return datetime
    """
    dlist=dstr.split() 
    year, month, day=dlist[0].split('-')
    hour, minute, second = dlist[1].split(':')
    
    return datetime.datetime(int(year), int(month), int(day), int(hour), int(minute),int(float(second)))

def subtracttime(d1, d2):
    """Return the difference in two dates in seconds"""
    dt=max(d1,d2)-min(d1,d2)
    return 86400*dt.days+dt.seconds
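
# A small usage sketch for the helpers above (hypothetical timestamps):
_d1 = enterdatetime('2012-03-01 01:02:03.5')
_d2 = enterdatetime('2012-03-01 02:02:03.5')
assert subtracttime(_d1, _d2) == 3600   # one hour apart, in seconds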


# main code 

parfile = iraf.osfn("saltspec$specrectify.par") 
t = iraf.IrafTaskFactory(taskname="specrectify",value=parfile,function=specrectify, pkgname='saltspec')
Example #55
0
                        help='Overwrite output file?')
    parser.add_argument('--verbose',
                        action='store_true',
                        help='Write to a log file?')
    parser.add_argument('--logfile',
                        '-l',
                        help='Name of ascii log file',
                        default='kepcotrend.log',
                        dest='logfile',
                        type=str)
    parser.add_argument('--status',
                        '-e',
                        help='Exit status (0=good)',
                        default=0,
                        dest='status',
                        type=int)

    args = parser.parse_args()

    cmdLine = True

    kepclip(args.infile, args.outfile, args.ranges, args.plot, args.plotcol,
            args.clobber, args.verbose, args.logfile, args.status, cmdLine)

else:
    from pyraf import iraf
    parfile = iraf.osfn("kepler$kepclip.par")
    t = iraf.IrafTaskFactory(taskname="kepclip",
                             value=parfile,
                             function=kepclip)
Example #56
0
    """Return the keyword value.  Throw a warning if it doesn't work """

    try:
        value = saltkey.get(keyword, struct)
        if isinstance(default, str): value = value.strip()
    except SaltIOError:
        value = default
        infile = struct._file.name
        message = 'WARNING: cannot find keyword %s in %s' % (keyword, infile)
        if warn and log: log.message(message, with_header=False)
    if (str(value).strip() == ''): value = default
    if (type(value) != type(default)):
        infile = struct._file.name
        message = 'WARNING: Type mismatch for %s for  %s in %s[0]' % (
            str(value), keyword, infile)
        message += '\n ' + str(type(value)) + ' ' + str(type(default))
        if warn and log: log.message(message, with_header=False)
        value = default

    return value


# -----------------------------------------------------------
# main code
if not iraf.deftask('saltobslog'):
    parfile = iraf.osfn("saltred$saltobslog.par")
    t = iraf.IrafTaskFactory(taskname="saltobslog",
                             value=parfile,
                             function=saltobslog,
                             pkgname='saltred')
Example #58
0
    parser.add_argument('--verbose',
                        action='store_true',
                        help='Write to a log file?')
    parser.add_argument('--logfile',
                        '-l',
                        help='Name of ascii log file',
                        default='kepcotrend.log',
                        dest='logfile',
                        type=str)
    parser.add_argument('--status',
                        '-e',
                        help='Exit status (0=good)',
                        default=0,
                        dest='status',
                        type=int)

    args = parser.parse_args()

    kepcotrendsc(args.infile, args.outfile, args.bvfile, args.listbv,
                 args.fitmethod, args.fitpower, args.iterate, args.sigma,
                 args.maskfile, args.scinterp, args.plot, args.clobber,
                 args.verbose, args.logfile, args.status)

else:
    from pyraf import iraf

    parfile = iraf.osfn("kepler$kepcotrend.par")
    t = iraf.IrafTaskFactory(taskname="kepcotrend",
                             value=parfile,
                             function=kepcotrendsc)
Example #59
0
               saltkey.put('DATASEC',datasec,struct[bhdu])

       #subtract the master bias if necessary
       if subbias and bstruct:
           struct[i].data -= bstruct[i].data

           #update the variance frame
           if saltkey.found('VAREXT', struct[i]):
               vhdu=saltkey.get('VAREXT', struct[i])
               try:
                   vdata=struct[vhdu].data
                   struct[vhdu].data=vdata+bstruct[vhdu].data
               except Exception, e:
                    msg='Cannot update the variance frame in %s[%i] because %s' % (infile, vhdu, e)
                    raise SaltError(msg)
 
       

   if plotover: 
       plt.ioff()
       plt.show()

   return struct

# -----------------------------------------------------------
# main code

if not iraf.deftask('saltbias'):
  parfile = iraf.osfn("saltred$saltbias.par")
  t = iraf.IrafTaskFactory(taskname="saltbias",value=parfile,function=saltbias, pkgname='saltred')
Example #60
0
        print dir, ' directory with data'
        outfile = outfile.strip()
        if os.path.isfile(outfile):
            print 'output file exists, appending'
#            saltsafeio.delete(outfile)

# check whether the calibrate logfile is defined

        saltsafeio.filedefined('Log', calibratelogfile)

        # Get current working directory as the Fortran code changes dir
        startdir = os.getcwd()

        # If all looks OK, run the FORTRAN code

        calibrate_wrapper.calibrate(plottype, infile, outfile,
                                    calibratelogfile)

        # go back to starting directory
        os.chdir(startdir)


# -----------------------------------------------------------
# main code

parfile = iraf.osfn("saltfp$saltfpcalibrate.par")
t = iraf.IrafTaskFactory(taskname="saltfpcalibrate",
                         value=parfile,
                         function=saltfpcalibrate,
                         pkgname='saltfp')