def saltarchive(obsdate,archpath, clobber,logfile,verbose):
    """Archive the SALT data.

    Move the data for ``obsdate`` (YYYYMMDD string) into the archive tree
    at ``<archpath><YYYY>/<MMDD>`` after it has been run by the pipeline.

    Parameters
    ----------
    obsdate : str
        Observation date in YYYYMMDD form; also the name of the local
        directory that is moved into the archive.
    archpath : str
        Root of the archive tree.  NOTE: paths are built by string
        concatenation, so this is assumed to end with '/' -- the same
        assumption the rest of this module makes.
    clobber : bool
        If True, an existing archive directory for this date is removed
        first; if False, an existing directory raises SaltError.
    logfile : str
        Name of the log file.
    verbose : bool
        Verbosity flag (currently unused in this function).

    Raises
    ------
    SaltError
        If the archive directory already exists and clobber is False.
    """
    with logging(logfile,debug) as log:
        # check the entries
        saltio.argdefined('obsdate',obsdate)
        saltio.argdefined('archpath',archpath)

        # archive layout: <archpath><YYYY>/<MMDD> -- compute both paths once
        yeardir = archpath + obsdate[:4]
        archdir = yeardir + '/' + obsdate[4:]

        # create the year directory if it does not already exist
        if not os.path.exists(yeardir):
            saltio.createdir(yeardir)

        # clean up the archive directory if it already exists, but only
        # when the caller explicitly allowed overwriting
        if os.path.exists(archdir):
            if not clobber:
                raise SaltError('Cannot overwrite ' + archdir)
            # walk bottom-up so files are deleted before their directories
            for root, dirs, files in os.walk(archdir, topdown=False):
                for fname in files:
                    os.remove(os.path.join(root, fname))
                for dname in dirs:
                    os.rmdir(os.path.join(root, dname))
            os.rmdir(archdir)

        # move the local obsdate directory into the archive
        saltio.move(obsdate, archdir)
def saltemail(propcode, obsdate,readme,server='mail.saao.ac.za',username='', password='', bcc='',sdbhost='sdb.saao',sdbname='sdb',sdbuser='******', logfile='salt.log',verbose=True): """For a given date, look into the database for observations made on that date. If propcode='all' send an email to all of the PIs that had data taken on that date. If propcode gives a specific proposal, only send emails to that PI. """ # set up sender = '*****@*****.**' bcclist = bcc.split(',') date_obss = [] propids = [] pis = [] pids = [] email = [] with logging(logfile,debug) as log: #set up the proposal list pids = saltio.argunpack('propcode',propcode) #check to see if the argument is defined saltio.argdefined('obsdate',obsdate) # check readme file exists saltio.fileexists(readme) #connect to the email server try: smtp = smtplib.SMTP() smtp.connect(server) smtp.ehlo() smtp.starttls() smtp.ehlo() except Exception, e: message = 'Cannot connect to %s because %s' % (server, e) log.error(message) try: smtp.login(username,password) except Exception, e: message = 'Cannot login to %s as %s because %s' % (server, username, e) log.error(message)
def saltdataquality(obsdate, sdbhost = "sdb.saao", sdbname = "sdb", sdbuser = "", password = '', clobber=False, logfile='salt.log',verbose=True):
    """Measure the data quality for SALT observations taken on `obsdate`.

    Builds (or, when `clobber` is set, rebuilds) a working directory named
    dq<obsdate>, changes into it, connects to the science database, and runs
    the data-quality measurement for each instrument in turn.
    """
    obsdate_str = str(obsdate)

    # (re)create the data-quality working directory and move into it
    dqdir = 'dq%s' % obsdate
    if clobber and os.path.isdir(dqdir):
        saltio.deletedir(dqdir)
    saltio.createdir(dqdir)
    saltio.changedir(dqdir)

    with logging(logfile, debug) as log:
        # sanity-check the arguments
        saltio.argdefined('obsdate', obsdate_str)

        # open a connection to the science database
        sdb = saltmysql.connectdb(sdbhost, sdbname, sdbuser, password)

        # run the quality measurement once per instrument
        for instrume in ('rss', 'scam'):
            log.message('Measuring Data Quality for %s observations' % instrume.upper())
            dataquality(obsdate_str, sdb, instrume, clobber, logfile, verbose)
def saltpipe(obsdate,pinames,archive,ftp,email,emserver,emuser,empasswd,bcc, qcpcuser,qcpcpasswd, ftpserver,ftpuser,ftppasswd,sdbhost, sdbname, sdbuser, sdbpass, elshost, elsname, elsuser, elspass, median,function,order,rej_lo,rej_hi,niter,interp, clobber, runstatus, logfile,verbose):
    """Stage and pre-process one night of SALT data for the pipeline.

    Validates ``obsdate`` (YYYYMMDD), creates a temporary working directory
    named after it, connects to the science database, and copies the raw
    RSS, SALTICAM and HRS data into the working tree (pre-processing the
    HRS frames on the way in).

    NOTE(review): `archive`, `runstatus` and several of the credential
    parameters are not used in the portion of the function shown here --
    presumably consumed by later stages; confirm against the full source.
    """

    # set up
    basedir=os.getcwd()
    propcode=pinames
    sender = emuser + '@salt.ac.za'
    recipient = sender
    emessage = ''
    emailfile = '../piemaillist/email.lis'

    # check the observation date is sensible
    if ('/' in obsdate or '20' not in obsdate or len(obsdate) != 8):
        emessage = 'Observation date does not look sensible - YYYYMMDD\n'
        raise SaltError(emessage)

    # stop if the obsdate temporary directory already exists
    obsdir='%s' % obsdate
    if os.path.exists(obsdir):
        emessage += 'The temporary working directory ' + os.getcwd() + '/'
        emessage += obsdate + ' already exists. '
        raise SaltError(emessage)

    # create a temporary working directory and move to it
    saltio.createdir(obsdir)
    saltio.changedir(obsdir)
    workpath = saltio.abspath('.')

    # test the logfile
    logfile = workpath+logfile
    logfile = saltio.logname(logfile)

    #note the starttime
    starttime = time.time()

    #start logging
    with logging(logfile,debug) as log:

        #connect to the database
        sdb=saltmysql.connectdb(sdbhost, sdbname, sdbuser, sdbpass)

        #get the nightinfo id
        nightinfoid=saltmysql.getnightinfoid(sdb, obsdate)

        #Get the list of proposal codes
        state_select='Proposal_Code'
        state_tables='Proposal join ProposalCode using (ProposalCode_Id)'
        state_logic="current=1"
        records=saltmysql.select(sdb, state_select, state_tables, state_logic)
        propids=[k[0] for k in records]

        # Calculate the current date
        currentdate=salttime.currentobsdate()

        # are the arguments defined
        saltio.argdefined('obsdate',obsdate)

        # check email and ftp arguments are consistent
        if email and not ftp:
            message = 'ERROR: SALTPIPE -- cannot send email to PI(s) unless data is transferred '
            message += 'to the FTP server; use ftp=\'yes\' email=\'yes\''
            raise SaltError(message)

        # identify a potential list of keyword edits
        keyfile = '../newheadfiles/list_newhead_' + obsdate
        if not os.path.isfile(keyfile):
            message = '\nSALTPIPE -- keyword edits ' + keyfile + ' not found locally'
            log.message(message)

        # check directories for the raw RSS data
        rssrawpath = makerawdir(obsdate, 'rss')

        # check directories for the raw SALTICAM data
        scmrawpath = makerawdir(obsdate, 'scam')

        # check raw directories for the disk.file record and find last file number
        #check rss data
        lastrssnum = checkfordata(rssrawpath, 'P', obsdate, log)
        #check scame data
        lastscmnum = checkfordata(scmrawpath, 'S', obsdate, log)

        #check for HRS Data--not filedata yet, so cannot check
        # (a count of 1 appears to mean "no frames found" -- TODO confirm
        # against checkfordata)
        if lastrssnum == 1 and lastscmnum == 1:
            message = 'SALTPIPE -- no SALTICAM or RSS data obtained on ' + obsdate
            emessage += '\n' + message + '\n'
            log.message(message)

        #copy the data to the working directory
        if lastrssnum > 1:
            message = 'Copy ' + rssrawpath + ' --> ' + workpath + 'raw/'
            log.message(message)
            saltio.copydir(rssrawpath,'rss/raw')
        if lastscmnum > 1:
            message = 'Copy ' + scmrawpath + ' --> ' + workpath + 'raw/'
            log.message(message)
            saltio.copydir(scmrawpath,'scam/raw')

        #copy and pre-process the HRS data
        # blue arm first; on failure the frame count falls back to 0
        try:
            hrsbrawpath = makerawdir(obsdate, 'hbdet')
            saltio.createdir('hrs')
            saltio.createdir('hrs/raw')
            message = 'Copy ' + hrsbrawpath + ' --> ' + workpath + 'raw/'
            log.message(message)
            salthrspreprocess(hrsbrawpath, 'hrs/raw/', clobber=True, log=log, verbose=verbose)
            lasthrbnum=len(glob.glob('hrs/raw/*fits'))
        except Exception,e:
            log.message('Could not copy HRS data because %s' % e)
            lasthrbnum=0
        # red arm; on failure keep the blue-arm count
        try:
            hrsrrawpath = makerawdir(obsdate, 'hrdet')
            message = 'Copy ' + hrsrrawpath + ' --> ' + workpath + 'raw/'
            log.message(message)
            salthrspreprocess(hrsrrawpath, 'hrs/raw/', clobber=True, log=log, verbose=verbose)
            lasthrsnum=max(lasthrbnum, len(glob.glob('hrs/raw/*fits')))
        except Exception,e:
            log.message('Could not copy HRS data because %s' % e)
            lasthrsnum=lasthrbnum
def slotmerge(images, outimages, outpref, geomfile, clobber, logfile, verbose):
    """Merge the amplifier extensions of SALT slot-mode images into
    single-CCD mosaiced images.

    For each input file, every group of ``NCCDS * 2`` amplifier extensions
    (one exposure) is stitched side by side -- with the inter-chip gap and
    per-chip x/y shifts from the CCD geometry file applied -- into one
    image extension of the output file.  CCD rotation in the geometry file
    is currently ignored (a warning is logged).
    """
    with logging(logfile, debug) as log:
        # are the arguments defined
        saltsafeio.argdefined('images', images)
        saltsafeio.argdefined('geomfile', geomfile)
        saltsafeio.argdefined('logfile', logfile)

        # if the input file is a list, does it exist?
        if images[0] == '@':
            saltsafeio.listexists('Input', images)

        # parse list of input files
        infiles = saltsafeio.listparse('Raw image', images, '', '', '')

        # check input files exist
        saltsafeio.filesexist(infiles, '', 'r')

        # load output name list: @list, * and comma separated
        outimages = outimages.strip()
        outpref = outpref.strip()
        if len(outpref) == 0 and len(outimages) == 0:
            raise SaltIOError('Output file(s) not specified')

        # test output @filelist exists
        if len(outimages) > 0 and outimages[0] == '@':
            saltsafeio.listexists('Output', outimages)

        # parse list of output files
        outfiles = saltsafeio.listparse('Output image', outimages, outpref, infiles, '')

        # are input and output lists the same length?
        saltsafeio.comparelists(infiles, outfiles, 'Input', 'output')

        # do the output files already exist?
        if not clobber:
            saltsafeio.filesexist(outfiles, '', 'w')

        # does CCD geometry definition file exist
        geomfilefile = geomfile.strip()   # NOTE(review): assigned but never used
        saltsafeio.fileexists(geomfile)

        # read geometry definition file
        gap = 0
        xshift = [0, 0]
        yshift = [0, 0]
        rotation = [0, 0]
        gap, xshift, yshift, rotation = saltsafeio.readccdgeom(geomfile)
        for ro in rotation:
            if ro != 0:
                log.warning('SLOTMERGE currently ignores CCD rotation')

        # Begin processes each file
        for infile, outfile in zip(infiles, outfiles):
            # determine the name for the output file
            outpath = outfile.rstrip(os.path.basename(outfile))
            if (len(outpath) == 0):
                outpath = '.'

            # open each raw image
            struct = saltsafeio.openfits(infile)

            # identify instrument
            instrume, keyprep, keygain, keybias, keyxtalk, keyslot = saltsafekey.instrumid(
                struct, infile)

            # how many amplifiers?
            nccds = saltsafekey.get('NCCDS', struct[0], infile)
            amplifiers = nccds * 2
            #if (nccds != 2):
            #   raise SaltError('Can not currently handle more than two CCDs')

            # CCD geometry coefficients
            # per-chip x/y shifts with the reference (middle) chip fixed at 0
            if instrume == 'RSS' or instrume == 'PFIS':
                xsh = [xshift[0], 0., xshift[1]]
                ysh = [yshift[0], 0., yshift[1]]
                rot = [rotation[0], 0., rotation[1]]
                refid = 1
            if instrume == 'SALTICAM':
                xsh = [xshift[0], 0.]
                ysh = [yshift[0], 0.]
                rot = [rotation[0], 0]
                refid = 1

            # how many extensions?
            nextend = saltsafekey.get('NEXTEND', struct[0], infile)

            # how many exposures
            exposures = nextend / amplifiers

            # CCD on-chip binning
            xbin, ybin = saltsafekey.ccdbin(struct[0], infile)
            gp = int(gap / xbin)   # inter-chip gap in binned pixels

            # create output hdu structure
            outstruct = [None] * int(exposures + 1)
            outstruct[0] = struct[0]

            # iterate over exposures, stitch them to produce file of CCD images
            for i in range(exposures):
                # Determine the total size of the image
                # width = sum of amplifier widths + gaps + shift range;
                # height = tallest amplifier + shift range
                xsize = 0
                ysize = 0
                for j in range(amplifiers):
                    hdu = i * amplifiers + j + 1
                    try:
                        xsize += len(struct[hdu].data[0])
                        if ysize < len(struct[hdu].data):
                            ysize = len(struct[hdu].data)
                    except:
                        msg = 'Unable to access extension %i ' % hdu
                        raise SaltIOError(msg)

                xsize += gp * (nccds - 1)
                maxxsh, minxsh = determineshifts(xsh)
                maxysh, minysh = determineshifts(ysh)
                xsize += (maxxsh - minxsh)
                ysize += (maxysh - minysh)

                # Determine the x and y origins for each frame
                # (two amplifiers share one chip, so the shift index `shid`
                # only advances after every second amplifier)
                xdist = 0
                ydist = 0
                shid = 0
                x0 = np.zeros(amplifiers)
                y0 = np.zeros(amplifiers)
                for j in range(amplifiers):
                    x0[j] = xdist + xsh[shid] - minxsh
                    y0[j] = ysh[shid] - minysh
                    hdu = i * amplifiers + j + 1
                    darr = struct[hdu].data
                    xdist += len(darr[0])
                    if j % 2 == 1:
                        xdist += gp
                        shid += 1

                # make the out image
                outarr = np.zeros((ysize, xsize), np.float64)

                # Embed each frame into the output array
                for j in range(amplifiers):
                    hdu = i * amplifiers + j + 1
                    darr = struct[hdu].data
                    outarr = salttran.embed(darr, x0[j], y0[j], outarr)

                # Add the outimage to the output structure
                # (header copied from the first amplifier of this exposure)
                hdu = i * amplifiers + 1
                outhdu = i + 1
                outstruct[outhdu] = pyfits.ImageHDU(outarr)
                outstruct[outhdu].header = struct[hdu].header

                # Fix the headers in each extension
                datasec = '[1:%4i,1:%4i]' % (xsize, ysize)
                saltsafekey.put('DATASEC', datasec, outstruct[outhdu], outfile)
                saltsafekey.rem('DETSIZE', outstruct[outhdu], outfile)
                saltsafekey.rem('DETSEC', outstruct[outhdu], outfile)
                saltsafekey.rem('CCDSEC', outstruct[outhdu], outfile)
                saltsafekey.rem('AMPSEC', outstruct[outhdu], outfile)

                # add housekeeping key words
                outstruct[outhdu] = addhousekeeping(outstruct[outhdu], outhdu, outfile)

            # close input FITS file
            saltsafeio.closefits(struct)

            # housekeeping keywords
            # NOTE(review): struct[0] is written to after closefits(struct)
            # above, and it is also outstruct[0] -- confirm this ordering is
            # intentional.
            keymosaic = 'SLOTMERG'
            fname, hist = history(level=1, wrap=False)
            saltsafekey.housekeeping(struct[0], keymosaic, 'Amplifiers have been mosaiced', hist)
            #saltsafekey.history(outstruct[0],hist)

            # this is added for later use by
            saltsafekey.put('NCCDS', 0.5, outstruct[0])
            saltsafekey.put('NSCIEXT', exposures, outstruct[0])
            saltsafekey.put('NEXTEND', exposures, outstruct[0])

            # write FITS file of mosaiced image
            outstruct = pyfits.HDUList(outstruct)
            saltsafeio.writefits(outstruct, outfile, clobber=clobber)
def salteditkey(images,outimages,outpref, keyfile, recfile=None,clobber=False,logfile='salt.log',verbose=True):
    """Apply a set of FITS-header keyword edits, read from `keyfile`, to a
    list of images.

    For each image that matches a file range in the key file, existing
    keywords are updated and missing ones are added; the changes are also
    accumulated in parallel lists (fitcol/keycol/oldcol/newcol),
    presumably for a change-record table -- the visible portion does not
    write them out.  `recfile` is unused in the portion shown here.
    """
    with logging(logfile,debug) as log:

        # Check the input images
        infiles = saltio.argunpack ('Input',images)

        # create list of output files
        outfiles=saltio.listparse('Outfile', outimages, outpref,infiles,'')

        #verify that the input and output lists are the same length
        saltio.comparelists(infiles,outfiles,'Input','output')

        #is key file defined
        saltio.argdefined('keyfile',keyfile)
        keyfile = keyfile.strip()
        saltio.fileexists(keyfile)

        # if the data are the same, set up to use update instead of write
        # NOTE(review): both branches assign 'copyonwrite', so openmode is
        # never 'update' and the 'update' write path below is dead code --
        # the in-place update behaviour appears to have been disabled;
        # confirm whether the else-branch was meant to set 'update'.
        openmode='copyonwrite'
        if (infiles!=outfiles):
            openmode='copyonwrite'

        # determine the date of the observations
        obsdate=saltstring.makeobsdatestr(infiles, 1,9)
        if len(obsdate)!=8:
            message = 'Either FITS files from multiple dates exist, '
            message += 'or raw FITS files exist with non-standard names.'
            log.warning(message)

        # FITS file columns to record keyword changes
        fitcol = []
        keycol = []
        oldcol = []
        newcol = []

        # Set up the rules to change the files
        keyedits=readkeyfile(keyfile, log=log, verbose=verbose)

        #now step through the images
        for img, oimg in zip(infiles, outfiles):

            #determine the appropriate keyword edits for the image
            klist=[]
            for frange in keyedits:
                if checkfitsfile(img, frange, keyedits[frange]):
                    klist.append(keyedits[frange][3])

            if klist:
                #open up the new files
                struct = saltio.openfits(img,mode=openmode)
                struct.verify('fix')

                for kdict in klist:
                    for keyword in kdict:
                        #record the changes
                        value=kdict[keyword]
                        fitcol.append(img)
                        keycol.append(keyword)
                        newcol.append(value)
                        try:
                            oldcol.append(struct[0].header[keyword].lstrip())
                        except:
                            # keyword absent or value not a string
                            oldcol.append('None')
                        #update the keyword
                        if saltkey.found(keyword, struct[0]):
                            try:
                                saltkey.put(keyword,value,struct[0])
                                message='\tUpdating %s in %s to %s' % (keyword, os.path.basename(img), value)
                                log.message(message, with_header=False, with_stdout=verbose)
                            except Exception, e:
                                message = 'Could not update %s in %s because %s' % (keyword, img, str(e))
                                raise SaltError(message)
                        else:
                            try:
                                saltkey.new(keyword.strip(),value,'Added Comment',struct[0])
                                message='\tAdding %s in %s to %s' % (keyword, os.path.basename(img), value)
                                log.message(message, with_header=False, with_stdout=verbose)
                            except Exception,e :
                                message = 'Could not update %s in %s because %s' % (keyword, img, str(e))
                                raise SaltError(message)

                #updat the history keywords
                #fname, hist=history(level=1, wrap=False, exclude=['images', 'outimages', 'outpref'])
                #saltkey.housekeeping(struct[0],'SAL-EDT', 'Keywords updated by SALTEDITKEY', hist)

                #write the file out
                if openmode=='update':
                    saltio.updatefits(struct)
                    message = 'Updated file ' + os.path.basename(oimg)
                else:
                    saltio.writefits(struct, oimg, clobber)
                    message = 'Created file ' + os.path.basename(oimg)
                log.message(message, with_header=False, with_stdout=True)
                struct.close()
def saltpipe(obsdate,pinames,archive,ftp,email,emserver,emuser,empasswd,bcc, qcpcuser,qcpcpasswd, ftpserver,ftpuser,ftppasswd,sdbhost, sdbname, sdbuser, sdbpass, elshost, elsname, elsuser, elspass, median,function,order,rej_lo,rej_hi,niter,interp, clobber, runstatus, logfile,verbose):
    """Run the SALT nightly pipeline for one observation date.

    Stages the raw RSS, SALTICAM and HRS data into a temporary working
    directory named after ``obsdate``, pre-processes it, validates PROPID
    keywords (for dates from 2011-09-01 on), reduces the data, uploads the
    products to the science database, builds the observation/pipeline HTML
    documentation, attaches environmental (ELS) data per proposal, and
    optionally transfers everything to the FTP area.

    NOTE(review): `archive` and `runstatus` are not used in the portion of
    the function shown here; confirm against the full source.
    """

    # set up
    basedir=os.getcwd()
    propcode=pinames
    sender = emuser + '@salt.ac.za'
    recipient = sender
    emessage = ''
    emailfile = '../piemaillist/email.lis'

    # check the observation date is sensible
    if ('/' in obsdate or '20' not in obsdate or len(obsdate) != 8):
        emessage = 'Observation date does not look sensible - YYYYMMDD\n'
        raise SaltError(emessage)

    # stop if the obsdate temporary directory already exists
    obsdir='%s' % obsdate
    if os.path.exists(obsdir):
        emessage += 'The temporary working directory ' + os.getcwd() + '/'
        emessage += obsdate + ' already exists. '
        raise SaltError(emessage)

    # create a temporary working directory and move to it
    saltio.createdir(obsdir)
    saltio.changedir(obsdir)
    workpath = saltio.abspath('.')

    # test the logfile
    logfile = workpath+logfile
    logfile = saltio.logname(logfile)

    #note the starttime
    starttime = time.time()

    #start logging
    with logging(logfile,debug) as log:

        #connect to the database
        sdb=saltmysql.connectdb(sdbhost, sdbname, sdbuser, sdbpass)

        #get the nightinfo id
        nightinfoid=saltmysql.getnightinfoid(sdb, obsdate)

        #Get the list of proposal codes
        state_select='Proposal_Code'
        state_tables='Proposal join ProposalCode using (ProposalCode_Id)'
        state_logic="current=1"
        records=saltmysql.select(sdb, state_select, state_tables, state_logic)
        propids=[k[0] for k in records]

        # Calculate the current date
        currentdate=salttime.currentobsdate()

        # are the arguments defined
        saltio.argdefined('obsdate',obsdate)

        # check email and ftp arguments are consistent
        if email and not ftp:
            message = 'ERROR: SALTPIPE -- cannot send email to PI(s) unless data is transferred '
            message += 'to the FTP server; use ftp=\'yes\' email=\'yes\''
            raise SaltError(message)

        # identify a potential list of keyword edits
        keyfile = '../newheadfiles/list_newhead_' + obsdate
        if not os.path.isfile(keyfile):
            message = '\nSALTPIPE -- keyword edits ' + keyfile + ' not found locally'
            log.message(message)

        # check directories for the raw RSS data
        rssrawpath = makerawdir(obsdate, 'rss')

        # check directories for the raw SALTICAM data
        scmrawpath = makerawdir(obsdate, 'scam')

        # check raw directories for the disk.file record and find last file number
        #check rss data
        lastrssnum = checkfordata(rssrawpath, 'P', obsdate, log)
        #check scame data
        lastscmnum = checkfordata(scmrawpath, 'S', obsdate, log)

        #check for HRS Data--not filedata yet, so cannot check
        # (a count of 1 appears to mean "no frames found" -- TODO confirm
        # against checkfordata)
        if lastrssnum == 1 and lastscmnum == 1:
            message = 'SALTPIPE -- no SALTICAM or RSS data obtained on ' + obsdate
            emessage += '\n' + message + '\n'
            log.message(message)

        #copy the data to the working directory
        if lastrssnum > 1:
            message = 'Copy ' + rssrawpath + ' --> ' + workpath + 'raw/'
            log.message(message)
            saltio.copydir(rssrawpath,'rss/raw')
        if lastscmnum > 1:
            message = 'Copy ' + scmrawpath + ' --> ' + workpath + 'raw/'
            log.message(message)
            saltio.copydir(scmrawpath,'scam/raw')

        #copy and pre-process the HRS data
        # both arms go into hrs/raw/; any failure leaves lasthrsnum=0
        try:
            hrsbrawpath = makerawdir(obsdate, 'hbdet')
            saltio.createdir('hrs')
            saltio.createdir('hrs/raw')
            message = 'Copy ' + hrsbrawpath + ' --> ' + workpath + 'raw/'
            log.message(message)
            salthrspreprocess(hrsbrawpath, 'hrs/raw/', clobber=True, log=log, verbose=verbose)

            hrsrrawpath = makerawdir(obsdate, 'hrdet')
            message = 'Copy ' + hrsrrawpath + ' --> ' + workpath + 'raw/'
            log.message(message)
            salthrspreprocess(hrsrrawpath, 'hrs/raw/', clobber=True, log=log, verbose=verbose)

            lasthrsnum=len(glob.glob('hrs/raw/*fits'))
        except Exception,e:
            log.message('Could not copy HRS data because %s' % e)
            lasthrsnum=0

        if lastrssnum>1 or lastscmnum>1:
            message = 'Copy of data is complete'
            log.message(message)
        else:
            message = 'No data was taken on %s' % obsdate
            log.message(message)

        #process the data RSS data
        if lastrssnum>1:
            preprocessdata('rss', 'P', obsdate, keyfile, log, logfile, verbose)

        #process the SCAM data
        if lastscmnum>1:
            preprocessdata('scam', 'S', obsdate, keyfile, log, logfile, verbose)

        #process the HRS data
        # HRS frames carry 'H' (blue) and 'R' (red) prefixes
        if lasthrsnum>1:
            preprocessdata('hrs', 'H', obsdate, keyfile, log, logfile, verbose)
            preprocessdata('hrs', 'R', obsdate, keyfile, log, logfile, verbose)

        #check that all data was given a proper proposal id
        #only do it for semesters after the start of science operations
        if int(obsdate)>=20110901:
            # Check to see that the PROPID keyword exists and if not add it
            message = '\nSALTPIPE -- Checking for PROPID keyword'
            log.message(message)

            #check rss data
            rssstatus=runcheckforpropid(glob.glob('rss/raw/P*.fits'), propids, log)
            #check scam data
            scmstatus=runcheckforpropid(glob.glob('scam/raw/S*.fits'), propids, log)
            #check hrsB data
            hrsbstatus=runcheckforpropid(glob.glob('hrs/raw/H*.fits'), propids, log)
            #check hrsB data
            hrsrstatus=runcheckforpropid(glob.glob('hrs/raw/R*.fits'), propids, log)

            if not rssstatus or not scmstatus or not hrsbstatus or not hrsrstatus:
                msg='The PROPIDs for these files needs to be updated and re-start the pipeline'
                raise SaltError("Invalid PROPID in images:"+msg)

        #process the RSS data
        rssrawsize, rssrawnum, rssprodsize, rssprodnum=processdata('rss', obsdate, propcode, median, function, order, rej_lo, rej_hi, niter, interp,logfile, verbose)

        #advance process the data
        if rssrawnum > 0:
            advanceprocess('rss', obsdate, propcode, median, function, order, rej_lo, rej_hi, niter, interp,sdbhost, sdbname, sdbuser, sdbpass, logfile, verbose)

        #process the SCAM data
        scmrawsize, scmrawnum, scmprodsize, scmprodnum=processdata('scam', obsdate, propcode, median, function, order, rej_lo, rej_hi, niter, interp,logfile, verbose)

        #process the HRS data
        hrsrawsize, hrsrawnum, hrsprodsize, hrsprodnum=hrsprocess('hrs', obsdate, propcode, median, function, order, rej_lo, rej_hi, niter, interp, logfile, verbose)

        #upload the data to the database
        img_list=glob.glob(workpath+'scam/product/*bxgp*.fits')
        img_list.extend(glob.glob(workpath+'rss/product/*bxgp*.fits'))
        img_list.extend(glob.glob(workpath+'hrs/raw/*.fits'))
        if img_list:
            img=','.join('%s' % (k) for k in img_list)
            saltsdbloadfits(images=img, sdbname=sdbname, sdbhost=sdbhost, sdbuser=sdbuser, \
                 password=sdbpass, logfile=logfile, verbose=verbose)

        #add junk sources to the database
        # raw frames whose PROPID is 'JUNK' are loaded individually
        raw_list=glob.glob(workpath+'scam/raw/S*.fits')
        raw_list.extend(glob.glob(workpath+'rss/raw/P*.fits'))
        if raw_list:
            img=''
            for img in raw_list:
                hdu=pyfits.open(img)
                if hdu[0].header['PROPID'].strip()=='JUNK':
                    saltsdbloadfits(images=img, sdbname=sdbname, sdbhost=sdbhost, sdbuser=sdbuser, \
                         password=sdbpass, logfile=logfile, verbose=verbose)
                hdu.close()

        # construct observation and pipeline documentation
        if lastrssnum > 1 and rssrawnum>0:
            rssobslog = 'rss/product/P' + obsdate + 'OBSLOG.fits'
        else:
            rssobslog = 'none'
        if lastscmnum > 1 and scmrawnum>0:
            scmobslog = 'scam/product/S' + obsdate + 'OBSLOG.fits'
        else:
            scmobslog = 'None'
        if lasthrsnum > 1 and hrsrawnum>0:
            hrsobslog = 'hrs/product/H' + obsdate + 'OBSLOG.fits'
        else:
            hrsobslog = 'None'

        # nothing at all was processed: suppress email and ftp transfer
        if rssrawnum==0 and scmrawnum==0 and hrsrawnum==0:
            msg='No data processed for %s' % obsdate
            email=False
            ftp=False
            log.message(msg)

        htmlpath = '.'
        nightlog = '../nightlogfiles/' + obsdate + '.log'
        readme = iraf.osfn('pipetools$html/readme.template')
        if not os.path.isfile(nightlog):
            nightlog = ''
            message = 'No night log file ~/nightlogfiles/' + obsdate + '.log found'
            log.warning(message)

        if (rssrawnum > 0 or scmrawnum > 0 or hrsrawnum>0):
            salthtml(propcode=propcode,scamobslog=scmobslog,rssobslog=rssobslog, hrsobslog=hrsobslog, htmlpath=htmlpath, nightlog=nightlog,readme=readme,clobber=True,logfile=logfile, verbose=verbose)

        #add a pause to allow syncing of the databases
        time.sleep(10)

        #Add in the environmental information
        if (rssrawnum > 0 or scmrawnum > 0 or hrsrawnum>0):
            propids=saltmysql.getpropcodes(sdb, obsdate)
            for pid in propids:
                saltelsdata(pid, obsdate, elshost, elsname, elsuser, elspass, sdbhost,sdbname,sdbuser, sdbpass, clobber, logfile,verbose)
                # move the ELS product into the proposal's doc/ directory;
                # on failure delete it so nothing is left behind
                try:
                    outfile='%s_%s_elsdata.fits' % (pid, obsdate)
                    outdir='%s/doc/' % (pid)
                    shutil.move(outfile, outdir)
                except:
                    os.remove(outfile)

        #ftp the data
        beachdir='/salt/ftparea/'
        if ftp:
            try:
                saltftp(propcode=propcode,obsdate=obsdate, datapath=workpath, password=ftppasswd,beachdir=beachdir,sdbhost=sdbhost, sdbname=sdbname,sdbuser=sdbuser,splitfiles=False, cleanup=True,clobber=True,logfile=logfile, verbose=verbose)
            except Exception,e:
                message="Not able to copy data to FTP area:\n%s " % e
                raise SaltError(message)
            #run with the splitting of files
            try:
                saltftp(propcode=propcode,obsdate=obsdate, datapath=workpath, password=ftppasswd,beachdir=beachdir,sdbhost=sdbhost, sdbname=sdbname,sdbuser=sdbuser,splitfiles=True, cleanup=True,clobber=True,logfile=logfile, verbose=verbose)
            except Exception,e:
                message="Not able to copy data to FTP area:\n%s " % e
                raise SaltError(message)
def slotmerge(images,outimages,outpref,geomfile,clobber,logfile,verbose):
    """Merge the amplifier extensions of SALT slot-mode images into
    single-CCD mosaiced images.

    For each input file, every group of ``NCCDS * 2`` amplifier extensions
    (one exposure) is stitched side by side -- applying the inter-chip gap
    and per-chip x/y shifts from the CCD geometry file -- into one image
    extension of the output file.  CCD rotation in the geometry file is
    currently ignored (a warning is logged).
    """
    with logging(logfile,debug) as log:

        # are the arguments defined
        saltsafeio.argdefined('images',images)
        saltsafeio.argdefined('geomfile',geomfile)
        saltsafeio.argdefined('logfile',logfile)

        # if the input file is a list, does it exist?
        if images[0] == '@':
            saltsafeio.listexists('Input',images)

        # parse list of input files
        infiles=saltsafeio.listparse('Raw image',images,'','','')

        # check input files exist
        saltsafeio.filesexist(infiles,'','r')

        # load output name list: @list, * and comma separated
        outimages = outimages.strip()
        outpref = outpref.strip()
        if len(outpref) == 0 and len(outimages) == 0:
            raise SaltIOError('Output file(s) not specified')

        # test output @filelist exists
        if len(outimages) > 0 and outimages[0] == '@':
            saltsafeio.listexists('Output',outimages)

        # parse list of output files
        outfiles=saltsafeio.listparse('Output image',outimages,outpref,infiles,'')

        # are input and output lists the same length?
        saltsafeio.comparelists(infiles,outfiles,'Input','output')

        # do the output files already exist?
        if not clobber:
            saltsafeio.filesexist(outfiles,'','w')

        # does CCD geometry definition file exist
        geomfilefile = geomfile.strip()   # NOTE(review): assigned but never used
        saltsafeio.fileexists(geomfile)

        # read geometry definition file
        gap = 0
        xshift = [0, 0]
        yshift = [0, 0]
        rotation = [0, 0]
        gap, xshift, yshift, rotation=saltsafeio.readccdgeom(geomfile)
        for ro in rotation:
            if ro!=0:
                log.warning('SLOTMERGE currently ignores CCD rotation')

        # Begin processes each file
        for infile, outfile in zip(infiles, outfiles):

            # determine the name for the output file
            outpath = outfile.rstrip(os.path.basename(outfile))
            if (len(outpath) == 0):
                outpath = '.'

            # open each raw image
            struct=saltsafeio.openfits(infile)

            # identify instrument
            instrume,keyprep,keygain,keybias,keyxtalk,keyslot=saltsafekey.instrumid(struct,infile)

            # how many amplifiers?
            nccds=saltsafekey.get('NCCDS',struct[0],infile)
            amplifiers = nccds * 2
            #if (nccds != 2):
            #   raise SaltError('Can not currently handle more than two CCDs')

            # CCD geometry coefficients
            # per-chip x/y shifts with the reference (middle) chip fixed at 0
            if instrume == 'RSS' or instrume == 'PFIS':
                xsh = [xshift[0], 0., xshift[1]]
                ysh = [yshift[0], 0., yshift[1]]
                rot = [rotation[0], 0., rotation[1]]
                refid = 1
            if instrume == 'SALTICAM':
                xsh = [xshift[0], 0.]
                ysh = [yshift[0], 0.]
                rot = [rotation[0], 0]
                refid = 1

            # how many extensions?
            nextend=saltsafekey.get('NEXTEND',struct[0],infile)

            # how many exposures
            exposures = nextend/amplifiers

            # CCD on-chip binning
            xbin, ybin=saltsafekey.ccdbin(struct[0],infile)
            gp = int(gap / xbin)   # inter-chip gap in binned pixels

            # create output hdu structure
            outstruct = [None] * int(exposures+1)
            outstruct[0]=struct[0]

            # iterate over exposures, stitch them to produce file of CCD images
            for i in range(exposures):
                # Determine the total size of the image
                # width = sum of amplifier widths + gaps + shift range;
                # height = tallest amplifier + shift range
                xsize=0
                ysize=0
                for j in range(amplifiers):
                    hdu=i*amplifiers+j+1
                    try:
                        xsize += len(struct[hdu].data[0])
                        if ysize < len(struct[hdu].data):
                            ysize=len(struct[hdu].data)
                    except:
                        msg='Unable to access extension %i ' % hdu
                        raise SaltIOError(msg)

                xsize += gp* (nccds-1)
                maxxsh, minxsh = determineshifts(xsh)
                maxysh, minysh = determineshifts(ysh)
                xsize += (maxxsh-minxsh)
                ysize += (maxysh-minysh)

                # Determine the x and y origins for each frame
                # (two amplifiers share one chip, so the shift index `shid`
                # only advances after every second amplifier)
                xdist=0
                ydist=0
                shid=0
                x0=np.zeros(amplifiers)
                y0=np.zeros(amplifiers)
                for j in range(amplifiers):
                    x0[j]=xdist+xsh[shid]-minxsh
                    y0[j]=ysh[shid]-minysh
                    hdu=i*amplifiers+j+1
                    darr=struct[hdu].data
                    xdist += len(darr[0])
                    if j%2==1:
                        xdist += gp
                        shid += 1

                # make the out image
                outarr=np.zeros((ysize, xsize), np.float64)

                # Embed each frame into the output array
                for j in range(amplifiers):
                    hdu=i*amplifiers+j+1
                    darr=struct[hdu].data
                    outarr=salttran.embed(darr, x0[j], y0[j], outarr)

                # Add the outimage to the output structure
                # (header copied from the first amplifier of this exposure)
                hdu=i*amplifiers+1
                outhdu=i+1
                outstruct[outhdu] = pyfits.ImageHDU(outarr)
                outstruct[outhdu].header=struct[hdu].header

                # Fix the headers in each extension
                datasec='[1:%4i,1:%4i]' % (xsize, ysize)
                saltsafekey.put('DATASEC',datasec, outstruct[outhdu], outfile)
                saltsafekey.rem('DETSIZE',outstruct[outhdu],outfile)
                saltsafekey.rem('DETSEC',outstruct[outhdu],outfile)
                saltsafekey.rem('CCDSEC',outstruct[outhdu],outfile)
                saltsafekey.rem('AMPSEC',outstruct[outhdu],outfile)

                # add housekeeping key words
                outstruct[outhdu]=addhousekeeping(outstruct[outhdu], outhdu, outfile)

            # close input FITS file
            saltsafeio.closefits(struct)

            # housekeeping keywords
            # NOTE(review): struct[0] is written to after closefits(struct)
            # above, and it is also outstruct[0] -- confirm this ordering is
            # intentional.
            keymosaic='SLOTMERG'
            fname, hist=history(level=1, wrap=False)
            saltsafekey.housekeeping(struct[0],keymosaic,'Amplifiers have been mosaiced', hist)
            #saltsafekey.history(outstruct[0],hist)

            # this is added for later use by
            saltsafekey.put('NCCDS', 0.5, outstruct[0])
            saltsafekey.put('NSCIEXT', exposures, outstruct[0])
            saltsafekey.put('NEXTEND', exposures, outstruct[0])

            # write FITS file of mosaiced image
            outstruct=pyfits.HDUList(outstruct)
            saltsafeio.writefits(outstruct, outfile, clobber=clobber)