Example #1
def preprocessdata(instrume, prefix,  obsdate, keyfile, log, logfile, verbose):
   """Run through all of the processing of the individual data files"""

   log.message('Beginning pre-processing of %s data' % instrume.upper())

   #set up the input path
   inpath=instrume+'/raw/'

   #create the product directory
   prodpath=instrume+'/product/'
   saltio.createdir(prodpath)

   # convert any slot mode binary data to FITS
   convertbin(inpath, iraf.osfn('pysalt$data/%s/%s_fits.config' % (instrume, instrume)), logfile, verbose)


   # fix sec keywords for data of unequal binning obtained before 2006 Aug 12
   if int(obsdate) < 20060812:
       pinfiles = instrume+'/raw/*.fits'
       log.message('Fixing SEC keywords in older data')
       log.message('SALTFIXSEC -- infiles=' + pinfiles)
       pipetools.saltfixsec(infiles=pinfiles)

   #fix the key words for the data set
   recfile = prodpath+prefix+ obsdate + 'KEYLOG.fits'
   img=','.join(glob.glob(inpath+prefix+'*fits'))
   if img:
       salteditkey(images=img,outimages=img,outpref='',keyfile=keyfile,recfile=recfile,
                       clobber=True,logfile=logfile,verbose=verbose)
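A minimal call sketch, mirroring how saltpipe invokes this routine further down this page; the observation date, keyword-edit file, and log file are placeholders, and debug is assumed to come from the surrounding pysalt task environment:

   # hypothetical values for illustration only
   obsdate = '20120101'
   keyfile = '../newheadfiles/list_newhead_' + obsdate
   logfile = 'salt.log'
   with logging(logfile, debug) as log:   # debug supplied by the pysalt task environment
       preprocessdata('rss', 'P', obsdate, keyfile, log, logfile, verbose=True)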
Example #2
def saltarchive(obsdate,archpath, clobber,logfile,verbose):
   """Archive the SALT data.  Move the data to /salt/data after it has been run by the pipeline"""

   with logging(logfile,debug) as log:
       #check the entries
       saltio.argdefined('obsdate',obsdate)
       saltio.argdefined('archpath',archpath)

       # create the directory if it does not already exist
       if (not os.path.exists(archpath+obsdate[:4])):
           saltio.createdir(archpath+obsdate[:4])

       #clean up the directory if it already exists
       archdir=archpath+obsdate[:4]+'/'+obsdate[4:]
       if (os.path.exists(archdir) and clobber):
           for root, dirs, files in os.walk(archpath+obsdate[:4]+'/'+obsdate[4:],topdown=False):
               for file in files:
                   os.remove(os.path.join(root,file))
               for dir in dirs:
                   os.rmdir(os.path.join(root,dir))
           os.rmdir(archpath+obsdate[:4]+'/'+obsdate[4:])
       elif (os.path.exists(archdir) and not clobber):
           message  = 'Cannot overwrite '+archpath+obsdate[:4]+'/'+obsdate[4:]
           raise SaltError(message)
       elif (not os.path.exists(archdir)):
            pass

       saltio.move(obsdate,archdir)
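The manual os.walk/os.remove/os.rmdir teardown above removes an existing archive directory before the move; the same clobber behaviour can be sketched more compactly with the standard library (the archive path and flag are illustrative):

   import os
   import shutil

   archdir = '/salt/data/2012/0101'   # illustrative: archpath + obsdate[:4] + '/' + obsdate[4:]
   clobber = True                     # illustrative flag
   if os.path.exists(archdir):
       if clobber:
           shutil.rmtree(archdir)     # removes all files and subdirectories, then archdir itself
       else:
           raise SaltError('Cannot overwrite ' + archdir)   # SaltError as used elsewhere on this page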
def saltdataquality(obsdate, sdbhost = "sdb.saao", sdbname = "sdb", sdbuser = "", 
                    password = '', clobber=False, logfile='salt.log',verbose=True):
   """For a given SALT data.  Move the data to /salt/data after it has been run by the pipeline"""
   #make a dataquality directory
   dqdir='dq%s' % obsdate
   if os.path.isdir(dqdir) and clobber:
      saltio.deletedir(dqdir)
   saltio.createdir(dqdir)
   saltio.changedir(dqdir)

   with logging(logfile,debug) as log:

       #check the entries
       saltio.argdefined('obsdate',str(obsdate))

       #open up the database
       sdb=saltmysql.connectdb(sdbhost, sdbname, sdbuser, password)


       #run for each instrument
       for instrume in ['rss', 'scam']:
           log.message('Measuring Data Quality for %s observations' % instrume.upper())
           dataquality(str(obsdate), sdb, instrume, clobber, logfile, verbose)
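A minimal call sketch for saltdataquality; the observation date and database credentials are placeholders and would normally come from the pipeline configuration:

   saltdataquality('20120101', sdbhost='sdb.saao', sdbname='sdb',
                   sdbuser='pipeline', password='***',
                   clobber=True, logfile='salt.log', verbose=True)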
Example #4
def saltpipe(obsdate,pinames,archive,ftp,email,emserver,emuser,empasswd,bcc, qcpcuser,qcpcpasswd,
             ftpserver,ftpuser,ftppasswd,sdbhost, sdbname, sdbuser, sdbpass, elshost, elsname, 
             elsuser, elspass, median,function,order,rej_lo,rej_hi,niter,interp,
             clobber, runstatus, logfile,verbose):

   # set up

   basedir=os.getcwd()
   propcode=pinames
   sender = emuser + '@salt.ac.za'
   recipient = sender
   emessage = ''
   emailfile = '../piemaillist/email.lis'

   # check the observation date is sensible
   if ('/' in obsdate or '20' not in obsdate or len(obsdate) != 8):
       emessage = 'Observation date does not look sensible - YYYYMMDD\n'
       raise SaltError(emessage)


   # stop if the obsdate temporary directory already exists

   obsdir='%s' % obsdate
   if os.path.exists(obsdir):
       emessage += 'The temporary working directory ' + os.getcwd() + '/'
       emessage += obsdate + ' already exists. '
       raise SaltError(emessage)

   # create a temporary working directory and move to it
   saltio.createdir(obsdir)
   saltio.changedir(obsdir)
   workpath = saltio.abspath('.')

   # test the logfile
   logfile = workpath+logfile
   logfile = saltio.logname(logfile)

   #note the starttime
   starttime = time.time()

   #start logging
   with logging(logfile,debug) as log:

       #connect to the database
       sdb=saltmysql.connectdb(sdbhost, sdbname, sdbuser, sdbpass)
       
       #get the nightinfo id   
       nightinfoid=saltmysql.getnightinfoid(sdb, obsdate)

       #Get the list of proposal codes
       state_select='Proposal_Code'
       state_tables='Proposal join ProposalCode using (ProposalCode_Id)'
       state_logic="current=1"
       records=saltmysql.select(sdb, state_select, state_tables, state_logic)
       propids=[k[0] for k in records]

       # Calculate the current date
       currentdate=salttime.currentobsdate()

       # are the arguments defined
       saltio.argdefined('obsdate',obsdate)

       # check email and ftp arguments are consistent
       if email and not ftp:
           message =  'ERROR: SALTPIPE -- cannot send email to PI(s) unless data is transferred '
           message += 'to the FTP server; use ftp=\'yes\' email=\'yes\''
           raise SaltError(message)

       # identify a potential list of keyword edits
       keyfile = '../newheadfiles/list_newhead_' + obsdate
       if not os.path.isfile(keyfile):
           message = '\nSALTPIPE -- keyword edits ' + keyfile + ' not found locally'
           log.message(message)


       # check directories for the raw RSS data
       rssrawpath = makerawdir(obsdate, 'rss')

       # check directories for the raw SALTICAM data
       scmrawpath = makerawdir(obsdate, 'scam')


       # check raw directories for the disk.file record and find last file number
       #check rss data
       lastrssnum = checkfordata(rssrawpath, 'P', obsdate, log)
       #check scam data
       lastscmnum = checkfordata(scmrawpath, 'S', obsdate, log)
       #check for HRS data -- no file data yet, so cannot check
   
       if lastrssnum == 1 and lastscmnum == 1:
           message = 'SALTPIPE -- no SALTICAM or RSS data obtained on ' + obsdate
           emessage += '\n' + message + '\n'
           log.message(message)

       #copy the data to the working directory
       if lastrssnum > 1:
           message = 'Copy ' + rssrawpath + ' --> ' + workpath + 'raw/'
           log.message(message)
           saltio.copydir(rssrawpath,'rss/raw')
       if lastscmnum > 1:
           message = 'Copy ' + scmrawpath + ' --> ' + workpath + 'raw/'
           log.message(message)
           saltio.copydir(scmrawpath,'scam/raw')

       #copy and pre-process the HRS data
       try:
           hrsbrawpath = makerawdir(obsdate, 'hbdet')
           saltio.createdir('hrs')
           saltio.createdir('hrs/raw')
           message = 'Copy ' + hrsbrawpath + ' --> ' + workpath + 'raw/'
           log.message(message)
           salthrspreprocess(hrsbrawpath, 'hrs/raw/', clobber=True, log=log, verbose=verbose)

           lasthrbnum=len(glob.glob('hrs/raw/*fits'))
       except Exception,e:
           log.message('Could not copy HRS data because %s' % e)
           lasthrbnum=0

       try:
           hrsrrawpath = makerawdir(obsdate, 'hrdet')
           message = 'Copy ' + hrsrrawpath + ' --> ' + workpath + 'raw/'
           log.message(message)
           salthrspreprocess(hrsrrawpath, 'hrs/raw/', clobber=True, log=log, verbose=verbose)
           
           lasthrsnum=max(lasthrbnum, len(glob.glob('hrs/raw/*fits')))
       except Exception,e:
           log.message('Could not copy HRS data because %s' % e)
           lasthrsnum=lasthrbnum
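The obsdate sanity check in saltpipe above only tests for a '/' character, the substring '20', and an eight-character length. A stricter stand-alone check could combine a regular expression with a calendar test; a sketch using only the standard library (the helper name is hypothetical):

   import re
   from datetime import datetime

   def check_obsdate(obsdate):
       """Return True if obsdate looks like a valid YYYYMMDD date."""
       if not re.match(r'^\d{8}$', str(obsdate)):
           return False
       try:
           datetime.strptime(str(obsdate), '%Y%m%d')
       except ValueError:
           return False
       return True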
Example #5
def saltpipe(obsdate,pinames,archive,ftp,email,emserver,emuser,empasswd,bcc, qcpcuser,qcpcpasswd,
             ftpserver,ftpuser,ftppasswd,sdbhost, sdbname, sdbuser, sdbpass, elshost, elsname, 
             elsuser, elspass, median,function,order,rej_lo,rej_hi,niter,interp,
             clobber, runstatus, logfile,verbose):

   # set up

   basedir=os.getcwd()
   propcode=pinames
   sender = emuser + '@salt.ac.za'
   recipient = sender
   emessage = ''
   emailfile = '../piemaillist/email.lis'

   # check the observation date is sensible
   if ('/' in obsdate or '20' not in obsdate or len(obsdate) != 8):
       emessage = 'Observation date does not look sensible - YYYYMMDD\n'
       raise SaltError(emessage)


   # stop if the obsdate temporary directory already exists

   obsdir='%s' % obsdate
   if os.path.exists(obsdir):
       emessage += 'The temporary working directory ' + os.getcwd() + '/'
       emessage += obsdate + ' already exists. '
       raise SaltError(emessage)

   # create a temporary working directory and move to it
   saltio.createdir(obsdir)
   saltio.changedir(obsdir)
   workpath = saltio.abspath('.')

   # test the logfile
   logfile = workpath+logfile
   logfile = saltio.logname(logfile)

   #note the starttime
   starttime = time.time()

   #start logging
   with logging(logfile,debug) as log:

       #connect to the database
       sdb=saltmysql.connectdb(sdbhost, sdbname, sdbuser, sdbpass)
       
       #get the nightinfo id   
       nightinfoid=saltmysql.getnightinfoid(sdb, obsdate)

       #Get the list of proposal codes
       state_select='Proposal_Code'
       state_tables='Proposal join ProposalCode using (ProposalCode_Id)'
       state_logic="current=1"
       records=saltmysql.select(sdb, state_select, state_tables, state_logic)
       propids=[k[0] for k in records]

       # Calculate the current date
       currentdate=salttime.currentobsdate()

       # are the arguments defined
       saltio.argdefined('obsdate',obsdate)

       # check email and ftp arguments are consistent
       if email and not ftp:
           message =  'ERROR: SALTPIPE -- cannot send email to PI(s) unless data is transferred '
           message += 'to the FTP server; use ftp=\'yes\' email=\'yes\''
           raise SaltError(message)

       # identify a potential list of keyword edits
       keyfile = '../newheadfiles/list_newhead_' + obsdate
       if not os.path.isfile(keyfile):
           message = '\nSALTPIPE -- keyword edits ' + keyfile + ' not found locally'
           log.message(message)


       # check directories for the raw RSS data
       rssrawpath = makerawdir(obsdate, 'rss')

       # check directories for the raw SALTICAM data
       scmrawpath = makerawdir(obsdate, 'scam')


       # check raw directories for the disk.file record and find last file number
       #check rss data
       lastrssnum = checkfordata(rssrawpath, 'P', obsdate, log)
       #check scam data
       lastscmnum = checkfordata(scmrawpath, 'S', obsdate, log)
       #check for HRS data -- no file data yet, so cannot check
   
       if lastrssnum == 1 and lastscmnum == 1:
           message = 'SALTPIPE -- no SALTICAM or RSS data obtained on ' + obsdate
           emessage += '\n' + message + '\n'
           log.message(message)

       #copy the data to the working directory
       if lastrssnum > 1:
           message = 'Copy ' + rssrawpath + ' --> ' + workpath + 'raw/'
           log.message(message)
           saltio.copydir(rssrawpath,'rss/raw')
       if lastscmnum > 1:
           message = 'Copy ' + scmrawpath + ' --> ' + workpath + 'raw/'
           log.message(message)
           saltio.copydir(scmrawpath,'scam/raw')

       #copy and pre-process the HRS data
       try:
           hrsbrawpath = makerawdir(obsdate, 'hbdet')
           saltio.createdir('hrs')
           saltio.createdir('hrs/raw')
           message = 'Copy ' + hrsbrawpath + ' --> ' + workpath + 'raw/'
           log.message(message)
           salthrspreprocess(hrsbrawpath, 'hrs/raw/', clobber=True, log=log, verbose=verbose)

           hrsrrawpath = makerawdir(obsdate, 'hrdet')
           message = 'Copy ' + hrsrrawpath + ' --> ' + workpath + 'raw/'
           log.message(message)
           salthrspreprocess(hrsrrawpath, 'hrs/raw/', clobber=True, log=log, verbose=verbose)
           
           lasthrsnum=len(glob.glob('hrs/raw/*fits'))
       except Exception,e:
           log.message('Could not copy HRS data because %s' % e)
           lasthrsnum=0

       if lastrssnum>1 or lastscmnum>1:
           message = 'Copy of data is complete'
           log.message(message)
       else:
           message = 'No data was taken on %s' % obsdate
           log.message(message)


       #process the data RSS data
       if lastrssnum>1:
           preprocessdata('rss', 'P', obsdate, keyfile, log, logfile, verbose)

       #process the  SCAM data
       if lastscmnum>1:
           preprocessdata('scam', 'S', obsdate, keyfile, log, logfile, verbose)

       #process the HRS data
       if lasthrsnum>1:
           preprocessdata('hrs', 'H', obsdate, keyfile, log, logfile, verbose)
           preprocessdata('hrs', 'R', obsdate, keyfile, log, logfile, verbose)
         


       #check that all data was given a proper proposal id
       #only do it for semesters after the start of science operations
       if int(obsdate)>=20110901:
           # Check to see that the PROPID keyword exists and if not add it
           message = '\nSALTPIPE -- Checking for PROPID keyword'
           log.message(message)

           #check rss data
           rssstatus=runcheckforpropid(glob.glob('rss/raw/P*.fits'), propids, log)
           #check scam data
           scmstatus=runcheckforpropid(glob.glob('scam/raw/S*.fits'), propids, log)
           #check hrsB data
           hrsbstatus=runcheckforpropid(glob.glob('hrs/raw/H*.fits'), propids, log)
           #check hrsR data
           hrsrstatus=runcheckforpropid(glob.glob('hrs/raw/R*.fits'), propids, log)

           if not rssstatus  or not scmstatus or not hrsbstatus or not hrsrstatus: 
               msg='The PROPIDs for these files need to be updated before re-starting the pipeline'
               raise SaltError("Invalid PROPID in images:"+msg)

       #process the RSS data
       rssrawsize, rssrawnum, rssprodsize, rssprodnum=processdata('rss', obsdate,  propcode, median, function, order, rej_lo, rej_hi, niter, interp,logfile, verbose)
      
       #advance process the data
       if rssrawnum > 0:
           advanceprocess('rss', obsdate,  propcode, median, function, order, rej_lo, rej_hi, niter, interp,sdbhost, sdbname, sdbuser, sdbpass, logfile, verbose)

       #process the SCAM data
       scmrawsize, scmrawnum, scmprodsize, scmprodnum=processdata('scam', obsdate, propcode,  median, function, order, rej_lo, rej_hi, niter, interp,logfile, verbose)

       #process the HRS data
       hrsrawsize, hrsrawnum, hrsprodsize, hrsprodnum=hrsprocess('hrs', obsdate, propcode, median, function, order, rej_lo, rej_hi, niter, interp, logfile, verbose)

       #upload the data to the database
       img_list=glob.glob(workpath+'scam/product/*bxgp*.fits')
       img_list.extend(glob.glob(workpath+'rss/product/*bxgp*.fits'))
       img_list.extend(glob.glob(workpath+'hrs/raw/*.fits'))
       if img_list:
           img=','.join('%s' %  (k) for k in img_list)
           saltsdbloadfits(images=img, sdbname=sdbname, sdbhost=sdbhost, sdbuser=sdbuser, \
                  password=sdbpass, logfile=logfile, verbose=verbose)

       #add junk sources to the database
       raw_list=glob.glob(workpath+'scam/raw/S*.fits')
       raw_list.extend(glob.glob(workpath+'rss/raw/P*.fits'))
       if raw_list:
          img=''
          for img in raw_list:
              hdu=pyfits.open(img)
              if hdu[0].header['PROPID'].strip()=='JUNK':
                saltsdbloadfits(images=img, sdbname=sdbname, sdbhost=sdbhost, sdbuser=sdbuser, \
                    password=sdbpass, logfile=logfile, verbose=verbose)
              hdu.close()
       

       # construct observation and pipeline documentation
       if lastrssnum > 1 and rssrawnum>0:
           rssobslog = 'rss/product/P' + obsdate + 'OBSLOG.fits'
       else:
           rssobslog = 'none'

       if lastscmnum > 1 and scmrawnum>0:
           scmobslog = 'scam/product/S' + obsdate + 'OBSLOG.fits'
       else:
           scmobslog = 'None'

       if lasthrsnum > 1 and hrsrawnum>0:
           hrsobslog = 'hrs/product/H' + obsdate + 'OBSLOG.fits'
       else:
           hrsobslog = 'None'



       if rssrawnum==0 and scmrawnum==0 and hrsrawnum==0:
           msg='No data  processed for  %s' % obsdate
           email=False
           ftp=False
           log.message(msg)

       htmlpath = '.'
       nightlog = '../nightlogfiles/' + obsdate + '.log'
       readme = iraf.osfn('pipetools$html/readme.template')
       if not os.path.isfile(nightlog):
           nightlog = ''
           message = 'No night log file ~/nightlogfiles/' + obsdate + '.log found'
           log.warning(message)

       if (rssrawnum > 0 or scmrawnum > 0 or hrsrawnum>0):
           salthtml(propcode=propcode,scamobslog=scmobslog,rssobslog=rssobslog, hrsobslog=hrsobslog, htmlpath=htmlpath,
                      nightlog=nightlog,readme=readme,clobber=True,logfile=logfile,
                      verbose=verbose)

       #add a pause to allow syncing of the databases
       time.sleep(10)

       #Add in the environmental information
       if (rssrawnum > 0 or scmrawnum > 0 or hrsrawnum>0):
           propids=saltmysql.getpropcodes(sdb, obsdate)
           for pid in propids:
               saltelsdata(pid, obsdate, elshost, elsname, elsuser, elspass,
                           sdbhost,sdbname,sdbuser, sdbpass, clobber, logfile,verbose) 
               try:
                  outfile='%s_%s_elsdata.fits' % (pid, obsdate)
                  outdir='%s/doc/' % (pid)
                  shutil.move(outfile, outdir)
               except:
                  os.remove(outfile)


       #ftp the data
       beachdir='/salt/ftparea/'
       if ftp:
           try:
               saltftp(propcode=propcode,obsdate=obsdate, datapath=workpath,
                   password=ftppasswd,beachdir=beachdir,sdbhost=sdbhost,
                   sdbname=sdbname,sdbuser=sdbuser,splitfiles=False, 
                   cleanup=True,clobber=True,logfile=logfile, verbose=verbose)
           except Exception,e:
               message="Not able to copy data to FTP area:\n%s " % e
               raise SaltError(message)     
           #run with the splitting of files
           try: 
               saltftp(propcode=propcode,obsdate=obsdate, datapath=workpath,
                   password=ftppasswd,beachdir=beachdir,sdbhost=sdbhost,
                   sdbname=sdbname,sdbuser=sdbuser,splitfiles=True, 
                   cleanup=True,clobber=True,logfile=logfile, verbose=verbose)
           except Exception,e:
               message="Not able to copy data to FTP area:\n%s " % e
               raise SaltError(message)     
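The runcheckforpropid helper called above is not shown on this page; a minimal sketch of what such a per-file check might look like, reusing the pyfits calls that appear in saltpipe (the implementation details are assumptions, only the call signature is taken from the code above):

   def runcheckforpropid(image_list, propids, log):
       """Return False if any image carries a PROPID that is not in propids (sketch)."""
       status = True
       for img in image_list:
           hdu = pyfits.open(img)
           propid = hdu[0].header['PROPID'].strip()
           hdu.close()
           if propid not in propids:
               log.message('Invalid PROPID %s in %s' % (propid, img), with_header=False)
               status = False
       return status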
def saltcalibrations(
    propcode,
    outfile=None,
    sdbhost="sdb.saao",
    sdbname="sdb",
    sdbuser="",
    password="",
    clobber=False,
    logfile="saltlog.log",
    verbose=True,
):
    """Seach the salt database for FITS files

    """

    with logging(logfile, debug) as log:

        # check the outfiles
        if not saltio.checkfornone(outfile):
            outfile = None

        # check that the output file can be deleted
        if outfile:
            saltio.overwrite(outfile, clobber)
            fout = open(outfile, "w")

        # connect to the database
        sdb = saltmysql.connectdb(sdbhost, sdbname, sdbuser, password)

        # determine the associated PI username and staging directory
        username = saltmysql.getpiptusername(sdb, propcode)
        userdir = "/salt/ftparea/%s/spst" % username
        if not os.path.exists(userdir):
            saltio.createdir(userdir)

        log.message("Will copy data to %s" % userdir)

        # Find all the data associated with a proposal
        cmd_select = "d.FileName,d.FileData_Id,  CCDTYPE, d.DETMODE, d.OBSMODE, CCDSUM, GAINSET, ROSPEED, FILTER, GRATING, GRTILT, CAMANG, MASKID"
        cmd_table = """ FileData as d 
  left join FitsHeaderImage using (FileData_Id) 
  left join FitsHeaderRss using (FileData_Id) 
  left join ProposalCode using (ProposalCode_Id)
"""
        cmd_logic = 'Proposal_Code="%s" and CCDTYPE="OBJECT" and d.OBSMODE="SPECTROSCOPY"' % (propcode)

        record = saltmysql.select(sdb, cmd_select, cmd_table, cmd_logic)

        # loop through all the results and return only the Set of identical results
        caldict = create_caldict(record)

        # prepare for writing out the results
        outstr = ""
        if outfile:
            fout.write(outstr + "\n")
        else:
            print outstr

        # now find all the cal_spst that have the same settings
        cmd_select = "d.FileName,d.FileData_Id,  CCDTYPE, d.DETMODE, d.OBSMODE, CCDSUM, GAINSET, ROSPEED, FILTER, GRATING, GRTILT, CAMANG, MASKID"
        cmd_table = """ FileData as d
  left join FitsHeaderImage using (FileData_Id) 
  left join FitsHeaderRss using (FileData_Id) 
  left join ProposalCode using (ProposalCode_Id)
"""
        for r in caldict:
            cmd_logic = "CCDSUM='%s' and GRATING='%s' and GRTILT='%s' and CAMANG='%s' and Proposal_Code='CAL_SPST'" % (
                caldict[r][3],
                caldict[r][7],
                caldict[r][8],
                caldict[r][9],
            )
            # cmd_logic="CCDSUM='%s' and GRATING='%s' and AR_STA='%s' " % (caldict[r][3], caldict[r][7], caldict[r][9])
            log.message(cmd_logic, with_header=False)
            record = saltmysql.select(sdb, cmd_select, cmd_table, cmd_logic)
            # print record

            # write out the results
            for r in record:
                outstr = " ".join(["%s" % x for x in r])
                if outfile:
                    fout.write(outstr + "\n")
                else:
                    log.message(outstr, with_header=False)

                # copy to the user directory
                cfile = makefilename(r[0], state="product")
                shutil.copy(cfile, userdir)
                cfile = makefilename(r[0], state="raw")
                shutil.copy(cfile, userdir)

        # close outfile
        if outfile:
            fout.close()
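A minimal call sketch for saltcalibrations; the proposal code and database credentials are placeholders:

    saltcalibrations('2012-1-RSA-001', outfile='calfiles.txt',
                     sdbhost='sdb.saao', sdbname='sdb',
                     sdbuser='pipeline', password='***',
                     clobber=True, logfile='saltlog.log', verbose=True)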
Example #7
def saltobsid(propcode,obslog,rawpath,prodpath,outpath,prefix='mbxgp', fprefix='bxgp',clobber=False,logfile='salt.log',verbose=True):
   """Split data into their different data directories
   """


   with logging(logfile,debug) as log:

       # are the arguments defined
       pids = saltio.argunpack('propcode',propcode)

       # check observation log file exists
       obslog = obslog.strip()
       saltio.fileexists(obslog)

       #open the observing log
       obstruct = saltio.openfits(obslog)
       obstab = saltio.readtab(obstruct[1],obslog)
       saltio.closefits(obstruct)

       #read in the file information
       filenames = saltstring.listfunc(obstab.field('filename'),'lstrip')
       instrumes = saltstring.listfunc(obstab.field('instrume'),'lstrip')
       proposers = saltstring.listfunc(obstab.field('proposer'),'clean')
       propids = saltstring.listfunc(obstab.field('propid'),'clean')
       ccdtypes = saltstring.listfunc(obstab.field('ccdtype'),'clean')
       ccdsums = saltstring.listfunc(obstab.field('ccdsum'),'clean')
       gainsets = saltstring.listfunc(obstab.field('gainset'),'clean')
       rospeeds = saltstring.listfunc(obstab.field('rospeed'),'clean')
       detmodes = saltstring.listfunc(obstab.field('detmode'),'clean')
       filters = saltstring.listfunc(obstab.field('filter'),'clean')
       gratings = saltstring.listfunc(obstab.field('grating'),'clean')
       gr_angles = obstab.field('gr-angle')
       ar_angles = obstab.field('ar-angle')
 
       # Create the list of proposals
       try:
           pids=saltio.cleanpropcode(pids, propids)
       except SaltIOError:
           #throw a warning and exit if no data needs to be filtered
           log.warning('No data to filter\n', with_stdout=verbose)
           return

       # check paths exist, end with a "/" and convert them to absolute paths
       rawpath = saltio.abspath(rawpath)
       prodpath = saltio.abspath(prodpath)
       outpath = saltio.abspath(outpath)

       #create the symlink raw path
       rawsplit=rawpath.strip().split('/')
       symrawpath='../../%s/%s/' % (rawsplit[-3], rawsplit[-2])
       prodsplit=prodpath.strip().split('/')
       symprodpath='../../%s/%s/' % (prodsplit[-3], prodsplit[-2])
  

       # create PI directories
       for pid in pids:
           saltio.createdir(outpath+pid)
           saltio.createdir(outpath+pid+'/raw')
           saltio.createdir(outpath+pid+'/product')

       #copy the data that belongs to a pid into that directory
       log.message('SALTOBSID -- filtering images to proposal directories\n', with_stdout=verbose)

       #copy data for a given proposal to the raw and product directories
       for i in range(len(obstab)):
           if os.path.exists(outpath+obstab[i]['propid']):
             if obstab[i]['object'].upper() not in ['ZERO', 'BIAS']:
               fname=obstab[i]['filename']
               pdir=obstab[i]['propid']
               detmode=obstab[i]['detmode']
               linkfiles(fname, pdir,detmode, symrawpath, symprodpath, outpath, prefix, fprefix, clobber)
               message='Copying %s to %s' % (fname, pdir)
               log.message(message, with_header=False, with_stdout=verbose)

       #look through the bias/flat/arc/standard data to see if there is any relevant data
       log.message('SALTOBSID -- filtering calibration files to proposal directories\n', with_stdout=verbose)

       caldata=['ZERO', 'FLAT', 'ARC']
       biasheader_list=['DETMODE', 'CCDSUM', 'GAINSET', 'ROSPEED']
       flatheader_list=['DETMODE', 'CCDSUM', 'GAINSET', 'ROSPEED', 'FILTER', 'GRATING', 'GR-ANGLE', 'AR-ANGLE']
       archeader_list=['OBSMODE', 'DETMODE', 'CCDSUM', 'GAINSET', 'ROSPEED', 'FILTER', 'GRATING', 'GR-ANGLE', 'AR-ANGLE']
       
       calproplist=['CAL_SPST']
       #Include bias frames
       log.message('SALTOBSID -- filtering bias files to proposal directories\n', with_stdout=verbose)

       for i in range(len(obstab)):
           fname=obstab[i]['filename']
           prop_list=[]
           #if it is a zero, check to see what other data have the same settings 
           if obstab[i]['CCDTYPE'].strip().upper()=='ZERO' or obstab[i]['OBJECT'].strip().upper() in ['BIAS', 'ZERO']:
               for j in range(len(obstab)):
                   if comparefiles(obstab[i], obstab[j], biasheader_list):
                       prop_list.append(obstab[j]['PROPID'])

           prop_list=saltio.removebadpids(set(prop_list))
           for pdir in prop_list:
                   detmode=obstab[i]['detmode']
                   linkfiles(fname, pdir, detmode, symrawpath, symprodpath, outpath,  fprefix, fprefix, clobber)
                   message='Copying %s to %s' % (fname, pdir)
                   log.message(message, with_header=False, with_stdout=verbose)

       #Include calibration  frames
       log.message('SALTOBSID -- filtering  calibration files to proposal directories\n', with_stdout=verbose)
 
       for i in range(len(obstab)):
           fname=obstab[i]['filename']
           prop_list=[]

           #if it is a flat, check to see what other data have the same settings 
           #this is turned off
           if obstab[i]['CCDTYPE'].strip().upper()=='FLAT' and False:
               for j in range(len(obstab)):
                   if comparefiles(obstab[i], obstab[j],  flatheader_list):
                       prop_list.append(obstab[j]['PROPID'])

           #if it is a arc, check to see what other data have the same settings 
           #this is turned off
           if obstab[i]['CCDTYPE'].strip().upper()=='ARC' and False:
               for j in range(len(obstab)):
                   if comparefiles(obstab[i], obstab[j],  archeader_list):
                       prop_list.append(obstab[j]['PROPID'])


           #if it is a calibration standard, see what other data have the same settings
           if obstab[i]['PROPID'].strip().upper() in calproplist:
               for j in range(len(obstab)):
                   if comparefiles(obstab[i], obstab[j],  flatheader_list):
                       prop_list.append(obstab[j]['PROPID'])


           prop_list=saltio.removebadpids(set(prop_list))
           for pdir in prop_list:
               if pdir!=obstab[i]['propid']:
                   detmode=obstab[i]['detmode']
                   linkfiles(fname, pdir, detmode, symrawpath, symprodpath, outpath,  prefix, fprefix, clobber)
                   message='Copying %s to %s' % (fname, pdir)
                   log.message(message, with_header=False, with_stdout=verbose)

       #Include master (bias or flat) frames
       log.message('SALTOBSID -- filtering master calibration files to proposal directories\n', with_stdout=verbose)
       masterlist=glob.glob(prodpath+'*Bias*')+glob.glob(prodpath+'*Flat*')
       for bimg in masterlist:
           struct=pyfits.open(bimg)
           bdict={}
           prop_list=[]
           for k in biasheader_list:
               bdict[k]=saltkey.get(k, struct[0])
           for i in range(len(obstab)):
               if comparefiles(obstab[i], bdict,  biasheader_list):
                   prop_list.append(obstab[i]['PROPID'])
           struct.close()

           #copy the files over to the directory
           prop_list=saltio.removebadpids(set(prop_list))
           for pdir in prop_list:
               fname=os.path.basename(bimg)
               infile = symprodpath+fname
               link = outpath+pdir+'/product/'+fname
               saltio.symlink(infile,link,clobber)
               message='Copying %s to %s' % (fname ,pdir)
               log.message(message, with_header=False, with_stdout=verbose)
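A minimal call sketch for saltobsid; the proposal code, paths, and observation log name are placeholders (the OBSLOG file name follows the pattern built in saltpipe above):

   saltobsid(propcode='all',
             obslog='rss/product/P20120101OBSLOG.fits',
             rawpath='rss/raw/', prodpath='rss/product/',
             outpath='./', prefix='mbxgp', fprefix='bxgp',
             clobber=True, logfile='salt.log', verbose=True)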