def saltelserror(obsdate, elshost, elsname, elsuser, elspass, sdbhost,sdbname,sdbuser, password, clobber,logfile,verbose):
    """Report TCS errors recorded in the ELS database for the night of
       obsdate.  The night is bounded by evening-twilight end and
       morning-twilight start as recorded in the sdb NightInfo table;
       each error found is printed along with its description.
    """
    # set up
    proposers = []
    propids = []
    pids = []

    with logging(logfile,debug) as log:

        #open the database
        els=saltmysql.connectdb(elshost, elsname, elsuser, elspass)
        sdb=saltmysql.connectdb(sdbhost,sdbname,sdbuser, password)

        #create the values for the entire night
        nid=saltmysql.getnightinfoid(sdb, obsdate)
        stime, etime=saltmysql.select(sdb, 'EveningTwilightEnd,MorningTwilightStart', 'NightInfo', 'NightInfo_Id=%i' % nid)[0]
        print stime, etime

        # pull the TCS errors logged between the twilight times
        errors=gettcserrors(els, stime, etime)
        # nothing logged for the night -- nothing to report
        if not errors: return None

        for e in errors:
            # NOTE(review): assumes each error record is a sequence in which
            # the position of 'T' indexes into error_list and the last entry
            # is the error text -- confirm against gettcserrors()
            i=e.index('T')
            print i, error_list[i], e[-1]

        print len(errors)
def saltweather(weathertime, timespan, elshost, elsname, elsuser, elspass, sdbhost, sdbname, sdbuser, password):
    """Read the most recent weather measurements from the ELS database and
       insert them into the Weather table of the science database (sdb).

       weathertime -- time of the measurement to record
       timespan    -- period over which the helper routines sample readings
    """
    print weathertime

    # open the database
    els = saltmysql.connectdb(elshost, elsname, elsuser, elspass)
    sdb = saltmysql.connectdb(sdbhost, sdbname, sdbuser, password)

    # determine the obsdate: the observing date rolls over at noon, so
    # shift back 12 hours (43200 s) before formatting as YYYYMMDD
    obsdate = weathertime - datetime.timedelta(seconds=43200)
    obsdate = "%i%s%s" % (obsdate.year, string.zfill(obsdate.month, 2), string.zfill(obsdate.day, 2))
    nid = saltmysql.getnightinfoid(sdb, obsdate)
    print nid

    # get the most recent weather data
    airpressure, dewout, extrelhum, temp2m, temp30m, windspeed, winddir, rain = getweatherdata(
        els, weathertime, timespan
    )
    dewin, relhum = getinsidedata(els, weathertime, timespan)
    tempin = getacdata(els, weathertime, timespan)

    # upload that to the sdb -- only columns with an actual measurement
    # (not None) are written
    upcmd = "Weather_Time='%s', NightInfo_Id=%i" % (weathertime, nid)
    if tempin is not None:
        upcmd += ",TemperatureInside=%4.2f" % tempin
    if temp2m is not None:
        upcmd += ",Temperature2m=%4.2f" % temp2m
    if temp30m is not None:
        upcmd += ",Temperature30m=%4.2f" % temp30m
    if windspeed is not None:
        upcmd += ",WindSpeed=%5.2f" % windspeed
    if winddir is not None:
        upcmd += ",WindDirection=%5.2f" % winddir
    if dewin is not None:
        upcmd += ",DewPointInside=%3.2f" % dewin
    if dewout is not None:
        upcmd += ",DewPointOutside=%3.2f" % dewout
    if airpressure is not None:
        upcmd += ",AirPressure=%4.2f" % airpressure
    if relhum is not None:
        # NOTE(review): 'RelativeHumidty' is misspelled but presumably
        # matches the actual sdb column name -- confirm before "fixing"
        upcmd += ",RelativeHumidty=%4.2f" % relhum
    if extrelhum is not None:
        upcmd += ",ExternalRelativeHumidity=%4.2f" % extrelhum
    if rain is not None:
        upcmd += ",Rain=%i" % rain

    saltmysql.insert(sdb, upcmd, "Weather")
    print upcmd

    return
def bvitftp(propcode, obsdate, sdbhost,sdbname,sdbuser, password, server,username,sender, bcc, emailfile, notify=False, clobber=True,logfile='salt.log',verbose=True):
    """Move BVIT data for the given proposal code(s) into the FTP area
       and, when notify is set, email the corresponding PIs.

       propcode may be a comma-separated list or 'ALL' to select every
       proposal with BVIT data on obsdate.
    """
    # set up
    proposers = []
    propids = []
    pids = []

    with logging(logfile,debug) as log:

        # are the arguments defined
        if propcode.strip().upper()=='ALL':
            pids=getbvitproposalcodes(str(obsdate))
        else:
            pids = saltio.argunpack('propcode',propcode)

        if len(pids)==0:
            #throw a warning and exit if no data needs to be filtered
            log.warning('No data to distribute\n', with_stdout=verbose)
            return

        #log into the database
        sdb=saltmysql.connectdb(sdbhost,sdbname,sdbuser,password)

        # check PI directories exist
        ftpdir='/salt/ftparea/'
        # NOTE(review): pids here appears to hold data paths (each basename
        # is the proposal id) -- confirm against getbvitproposalcodes()
        for datapath in pids:
            pid=os.path.basename(datapath)
            movebvitdata(pid, datapath, ftpdir, obsdate, sdb, clobber)

            if notify:
                notifybvituser(sdb, pid, obsdate, server,username,password,sender, bcc, emailfile)
def runfast(filename, propcode, obsdate, server, readmefile, sdbhost, sdbname, sdbuser, password):
    """Handle fast data delivery for the proposal. For a given filename,
       look up in the sdb whether the proposal requests 'Fast' pipeline
       data access and collect the contact details of its investigators.
    """
    # nothing to do without a proposal code
    if propcode is None or propcode=='None': return

    #first check in the sdb if fast data delivery is needed
    sdb=saltmysql.connectdb(sdbhost,sdbname, sdbuser, password)
    select_term='Distinct Surname, email, username, ProposalCode_Id'
    from_term='''
  Block join Pointing using (Block_Id)
       join PipelineConfig using (Pointing_Id)
       join Proposal using (Proposal_Id)
       join ProposalCode using (ProposalCode_Id)
       join PipelineDataAccessMethod using (PipelineDataAccessMethod_Id)
       join ProposalContact using (Proposal_Id)
       join Investigator on (Investigator_Id=Contact_Id)
       join PiptUser using (PiptUser_Id)
'''
    # only the current version of the proposal with Fast access requested
    where_term="Proposal_Code like '%s' and current=1 and DataAccessMethod='Fast'" \
         % (propcode)
    print 'Select %s from %s where %s' % (select_term, from_term, where_term)
    try:
        record=saltmysql.select(sdb, select_term, from_term, where_term)
    except Exception, e:
        # best effort: report the query failure and give up on fast delivery
        print e
        return None
def runfast(filename, propcode, obsdate, server, readmefile, sdbhost, sdbname, sdbuser, password):
    """Handle fast data delivery for the proposal. For a given filename,
       look up in the sdb whether the proposal requests 'Fast' pipeline
       data access and collect the contact details of its investigators.

       NOTE(review): near-duplicate of the other runfast() in this file;
       this version drops the Proposal join and the current=1 restriction
       and joins ProposalContact on ProposalCode_Id instead.
    """
    # nothing to do without a proposal code
    if propcode is None or propcode == 'None': return

    #first check in the sdb if fast data delivery is needed
    sdb = saltmysql.connectdb(sdbhost, sdbname, sdbuser, password)
    select_term = 'Distinct Surname, email, username, ProposalCode_Id'
    from_term = '''
  Block join Pointing using (Block_Id)
       join PipelineConfig using (Pointing_Id)
       join ProposalCode using (ProposalCode_Id)
       join PipelineDataAccessMethod using (PipelineDataAccessMethod_Id)
       join ProposalContact using (ProposalCode_Id)
       join Investigator on (Investigator_Id=Contact_Id)
       join PiptUser using (PiptUser_Id)
'''
    where_term="Proposal_Code like '%s' and DataAccessMethod='Fast'" \
         % (propcode)
    #print 'Select %s from %s where %s' % (select_term, from_term, where_term)
    try:
        record = saltmysql.select(sdb, select_term, from_term, where_term)
    except Exception, e:
        # best effort: report the query failure and give up on fast delivery
        print e
        return None
def erroremail(obsdate, server='', username='', password='', sender='', recipient='', bcc='', sdbhost='sdb.salt', sdbname='sdb', sdbuser='', sdbpass='', logfile='saltlog.log', verbose=True):
    """Email the pipeline error message recorded for obsdate.

       The status and error message are read from the PipelineStatistics
       table in the sdb and sent via saltio.email.
    """
    with logging(logfile,debug) as log:
        #connect the database
        sdb=saltmysql.connectdb(sdbhost, sdbname, sdbuser, sdbpass)

        #get the nightinfo_id
        night_id=saltmysql.getnightinfoid(sdb, obsdate)

        #create the message
        try:
            record=saltmysql.select(sdb, 'PipelineStatus, ErrorMessage', 'PipelineStatistics join PipelineStatus using (PipelineStatus_Id)', 'NightInfo_Id=%i' % night_id)
            status, error_message=record[0]
        except Exception, e:
            raise SaltError('Unable to download information: %s' % e)

        #set up the subject and message
        subject='Pipeline Error on %s' % obsdate
        message="Failure is simply the opportunity to begin again, this time more intelligently.--Henry Ford\n\n"
        message+="%s" % error_message
        log.message(message, with_stdout=verbose)

        #send the email
        saltio.email(server,username,password,sender,recipient,bcc, subject,message)
def sdbloadobslog(logstr, obsdate, sdbhost, sdbname, sdbuser, password):
    """Load logstr into the SDB.

       Looks up the NightInfo_Id for obsdate (YYYYMMDD) and writes logstr
       into ObsLogTable -- updating the existing row if the night already
       has one, inserting a new row otherwise.
    """
    print logstr

    #connect to the db
    sdb = saltmysql.connectdb(sdbhost, sdbname, sdbuser, password)

    #get the nightinfoid
    #date='YYYY-MM-DD'   <- note speechmarks
    #data= your table
    #Select NightInfo_Id from NightInfo where Date=$date;  (get nightinfo_Id)
    #Insert into ObsLogTable values ($nightinfo_Id, $data);
    logic = "Date='%4s-%2s-%2s'" % (obsdate[0:4], obsdate[4:6], obsdate[6:8])
    results = saltmysql.select(sdb, 'NightInfo_Id', 'NightInfo', logic)
    night_id = results[0][0]

    #check to see if it exists
    logic = 'NightInfo_Id=%i' % night_id
    results = saltmysql.select(sdb, 'NightInfo_Id', 'ObsLogTable', logic)

    if results:
        # a row for this night already exists -- overwrite its log
        update_cmd = "ObsLogTable='%s'" % (logstr)
        saltmysql.update(sdb, update_cmd, 'ObsLogTable', logic)
    else:
        insert_cmd = "NightInfo_Id=%i, ObsLogTable='%s'" % (night_id, logstr)
        saltmysql.insert(sdb, insert_cmd, 'ObsLogTable')

    print obsdate, night_id
def sdbloadobslog(logstr, obsdate, sdbhost, sdbname, sdbuser, password): """Load logstr into the SDB""" print logstr #connect to the db sdb=saltmysql.connectdb(sdbhost, sdbname, sdbuser, password) #get the nightinfoid #date='YYYY-MM-DD' <- note speechmarks #data= your table #Select NightInfo_Id from NightInfo where Date=$date; #(get nightinfo_Id) #Insert into ObsLogTable values ($nightinfo_Id, $data); logic="Date='%4s-%2s-%2s'" % (obsdate[0:4], obsdate[4:6], obsdate[6:8]) results=saltmysql.select(sdb, 'NightInfo_Id', 'NightInfo', logic) night_id=results[0][0] #check to see if it exists logic='NightInfo_Id=%i' % night_id results=saltmysql.select(sdb, 'NightInfo_Id', 'ObsLogTable', logic) if results: update_cmd="ObsLogTable='%s'" % (logstr) saltmysql.update(sdb, update_cmd, 'ObsLogTable', logic) else: insert_cmd="NightInfo_Id=%i, ObsLogTable='%s'" % (night_id, logstr) saltmysql.insert(sdb, insert_cmd, 'ObsLogTable') print obsdate, night_id
def cleandata(self, filename, iminfo=None, prodir='.', interp='linear', cleanup=True, clobber=False, logfile='saltclean.log', reduce_image=True, display_image=False, verbose=True): """Start the process to reduce the data and produce a single mosaicked image""" #print filename status = 0 #create the input file name infile = os.path.basename(filename) rawpath = os.path.dirname(filename) outpath = './' outfile = outpath + 'mbxp' + infile #print infile, rawpath, outpath #If it is a bin file, pre-process the data if filename.count('.bin'): print "I can't handle this yet" #ignore bcam files if infile.startswith('B'): return iminfo #check to see if it exists and return if clobber is no if os.path.isfile(outfile) and not clobber: return iminfo #handle HRS data print filename if infile.startswith('H') or infile.startswith('R'): outfile = os.path.basename(filename) + 's' print filename, outfile if not os.path.isfile(outfile): os.symlink(filename, outfile) #display the image if display_image: print "Displaying %s" % outfile try: display(outfile) except Exception, e: print e try: log = None #open(logfile, 'a') sdb = saltmysql.connectdb(self.sdbhost, self.sdbname, self.sdbuser, self.password) sdbloadfits(outfile, sdb, log, False) print 'SDBLOADFITS: SUCCESS' except Exception, e: print 'SDBLOADFITSERROR:', e
def saltnightstats(obsdate, outfile, elshost, elsname, elsuser, elspass, sdbhost,sdbname,sdbuser, password, clobber,logfile,verbose):
    """Create the night statistics report for obsdate and write it to
       outfile, using data from both the ELS and science databases.
    """
    # set up
    proposers = []
    propids = []
    pids = []

    with logging(logfile,debug) as log:

        #open the database
        els=saltmysql.connectdb(elshost, elsname, elsuser, elspass)
        sdb=saltmysql.connectdb(sdbhost,sdbname,sdbuser, password)

        #create the values for the entire night
        obsdate=str(obsdate)

        #create the night stats for the night
        makenightstats(els, sdb, outfile, obsdate, clobber=clobber)
def saltdasload(obsdate, sdbhost, sdbname, sdbuser, password, dashost, dasname, dasuser, daspassword, logfile, verbose):
    """Upload data from the science database to the SALTDAS database

       Only files that are no longer proprietary are copied across.
    """
    with logging(logfile,debug) as log:

        #open the science database
        sdb=saltmysql.connectdb(sdbhost,sdbname,sdbuser,password)

        #open saltdas
        das=saltmysql.connectdb(dashost,dasname,dasuser,daspassword)

        # all files taken on obsdate (FileName embeds the date)
        select='FileData_Id, FileName, Proposal_Code'
        table='FileData join ProposalCode using (ProposalCode_Id)'
        logic="FileName like '%"+str(obsdate)+"%'"
        record=saltmysql.select(sdb, select, table, logic)

        for r in record[:]:
            # skip proprietary data; everything else goes to SALTDAS
            if not isproprietary(r[0], sdb):
                print r
                loaddata(r[0], r[1], sdb, das)

    return
def cleandata(self, filename, iminfo=None, prodir='.', interp='linear', cleanup=True, clobber=False, logfile='saltclean.log', reduce_image=True, display_image=False, verbose=True): """Start the process to reduce the data and produce a single mosaicked image""" #print filename status=0 #create the input file name infile=os.path.basename(filename) rawpath=os.path.dirname(filename) outpath='./' outfile=outpath+'mbxp'+infile #print infile, rawpath, outpath #If it is a bin file, pre-process the data if filename.count('.bin'): print "I can't handle this yet" #ignore bcam files if infile.startswith('B'): return iminfo #check to see if it exists and return if clobber is no if os.path.isfile(outfile) and not clobber: return iminfo #handle HRS data print filename if infile.startswith('H') or infile.startswith('R'): outfile = os.path.basename(filename) +'s' print filename, outfile if not os.path.isfile(outfile): os.symlink(filename, outfile) #display the image if display_image: print "Displaying %s" % outfile try: display(outfile) except Exception, e: print e try: log=None #open(logfile, 'a') sdb=saltmysql.connectdb(self.sdbhost, self.sdbname, self.sdbuser, self.password) sdbloadfits(outfile, sdb, log, False) print 'SDBLOADFITS: SUCCESS' except Exception, e: print 'SDBLOADFITSERROR:', e
def saltsdbloadfits(images, sdbhost, sdbname, sdbuser, password, logfile, verbose):
    """Upload data about images (which is either a fits file, or specificied by @list) to the science data base, sdb
    """
    with logging(logfile,debug) as log:
        # expand the input specification into a list of fits files
        file_list = saltio.argunpack('Input',images)

        # one connection is reused for every upload
        db = saltmysql.connectdb(sdbhost,sdbname,sdbuser,password)

        # push each file's metadata into the sdb
        for fits_file in file_list:
            sdbloadfits(fits_file, db, log, verbose)
def pipelinestatus(obsdate, status, message=None, rawsize=None, reducedsize=None, runtime=None, emailsent=None, sdbhost='sdb.saao', sdbname='sdb', sdbuser='', password='', logfile='saltlog.log', verbose=True): """Update the PipelineStatistics table with the current information about the pipeline """ with logging(logfile,debug) as log: #connect the database sdb=saltmysql.connectdb(sdbhost, sdbname, sdbuser, password) #get the nightinfo_id night_id=saltmysql.getnightinfoid(sdb, obsdate) #get the status_id for the given status status_id=getstatusid(sdb, status) #create the insert command obsdate=str(obsdate) inst_cmd="NightInfo_Id=%s, PipelineStatus_Id=%i" % (night_id, status_id) if status_id>10: inst_cmd+=',ErrorMessage="%s"' % message if rawsize is not None: inst_cmd+=",RawSize=%f" % rawsize if reducedsize is not None: inst_cmd+=",ReducedSize=%f" % rawsize if runtime is not None: inst_cmd+=",PipelineRunTime=%i" % runtime if emailsent is not None: inst_cmd+=",EmailSent=%i" % emailsent print inst_cmd #insert or update the pipeline if checktable(sdb, night_id): saltmysql.update(sdb, inst_cmd, 'PipelineStatistics', 'NightInfo_Id=%i' % night_id) msg="Updating information for Night_Id=%i\n" % night_id else: saltmysql.insert(sdb, inst_cmd, 'PipelineStatistics') msg="Inserting information for Night_Id=%i\n" % night_id #log the call log.message(msg+inst_cmd, with_stdout=verbose)
def updatenightinfo(obsdate, sdbhost='sdb.saao', sdbname='sdb', \
              sdbuser='', password='', logfile='saltlog.log', verbose=True):
    """Update the nightinfo table with current information about the night

       Computes sun/moon rise and set times and twilight boundaries for
       obsdate and writes them to the NightInfo table (update if the night
       already exists, insert otherwise).
    """
    with logging(logfile,debug) as log:
        #connect the database
        sdb=saltmysql.connectdb(sdbhost, sdbname, sdbuser, password)

        #get the nightinfo_id -- a missing night means we must insert
        try:
            night_id=saltmysql.getnightinfoid(sdb, obsdate)
        except SaltError:
            night_id=None

        #get the information for the night
        time_list=get_nightdetails(obsdate)

        #create the insert command
        obsdate=str(obsdate)
        inst_cmd="Date='%s-%s-%s'," % (obsdate[0:4], obsdate[4:6], obsdate[6:8])
        inst_cmd+="SunSet='%s', SunRise='%s', MoonSet='%s', MoonRise='%s', EveningTwilightEnd='%s', MorningTwilightStart='%s'" % \
              (time_list[0], time_list[1], time_list[2], time_list[3], time_list[4], time_list[5])
        inst_cmd+=",MoonPhase_Percent=%i" % (round(float(time_list[6])))

        if night_id:
            saltmysql.update(sdb, inst_cmd, 'NightInfo', 'NightInfo_Id=%i' % night_id)
            msg="Updating information for Night_Id=%i\n" % night_id
        else:
            saltmysql.insert(sdb, inst_cmd, 'NightInfo')
            msg="Inserting information for Night_Id=%i\n" % night_id

        #log the call
        log.message(msg+inst_cmd, with_stdout=verbose)
def saltdataquality(obsdate, sdbhost = "sdb.saao", sdbname = "sdb", sdbuser = "", password = '', clobber=False, logfile='salt.log',verbose=True): """For a given SALT data. Move the data to /salt/data after it has been run by the pipeline""" #make a dataquality directory dqdir='dq%s' % obsdate if os.path.isdir(dqdir) and clobber: saltio.deletedir(dqdir) saltio.createdir(dqdir) saltio.changedir(dqdir) with logging(logfile,debug) as log: #check the entries saltio.argdefined('obsdate',str(obsdate)) #open up the database sdb=saltmysql.connectdb(sdbhost, sdbname, sdbuser, password) #run for each instrument for instrume in ['rss', 'scam']: log.message('Measuring Data Quality for %s observations' % instrume.upper()) dataquality(str(obsdate), sdb, instrume, clobber, logfile, verbose)
def saltelsdata(propcode, obsdate, elshost, elsname, elsuser, elspass, sdbhost,sdbname,sdbuser, password, clobber,logfile,verbose):
    """Extract environmental (ELS) data for each proposal observed on
       obsdate and write it to a <pid>_<obsdate>_elsdata.fits file.

       propcode may be a comma-separated list or 'ALL' for every proposal
       observed on the night.
    """
    # set up
    proposers = []
    propids = []
    pids = []

    with logging(logfile,debug) as log:

        # are the arguments defined
        if propcode.strip().upper()=='ALL':
            pids=saltmysql.getproposalcodes(str(obsdate), sdbhost,sdbname,sdbuser, password)
        else:
            pids = saltio.argunpack('propcode',propcode)

        #open the database
        els=saltmysql.connectelsview(elshost, elsname, elsuser, elspass)
        sdb=saltmysql.connectdb(sdbhost,sdbname,sdbuser, password)

        #create the values for the entire night
        #loop through the proposals
        for pid in pids:
            outfile='%s_%s_elsdata.fits' % (pid, obsdate)
            if clobber and os.path.isfile(outfile):
                saltio.delete(outfile)

            # extract only the time span during which this proposal
            # was actually being observed
            mintime, maxtime=determinetime(sdb, pid, obsdate)
            message='Extracting ELS data for %s from %s to %s' % (pid, mintime, maxtime)
            log.message(message, with_stdout=verbose)
            getelsdata(els, sdb, outfile, mintime, maxtime)
def saltcharge(
    obsdate,
    outfile,
    sdbhost="sdb.saao",
    sdbname="sdb",
    sdbuser="",
    password="",
    clobber=False,
    logfile="saltlog.log",
    verbose=True,
):
    """Calculate the charged time for proposals observed during a night

       Cross-references the SOMMI night log, the file data, and the block
       visits in the sdb to apportion slew/acquisition/science time to
       each accepted block, and writes the totals back to BlockVisit.
    """
    with logging(logfile, debug) as log:
        # check the outfiles
        if not saltio.checkfornone(outfile):
            outfile = None

        # check that the output file can be deleted
        if outfile:
            saltio.overwrite(outfile, clobber)

        # connect the database
        sdb = saltmysql.connectdb(sdbhost, sdbname, sdbuser, password)

        # get all the proposals observed during the night
        select_state = "distinct Proposal_Code"
        table_state = "FileData Join ProposalCode using (ProposalCode_Id)"
        logic_state = "FileName like '%" + obsdate + "%'"
        records = saltmysql.select(sdb, select_state, table_state, logic_state)
        pids = []
        pid_time = {}
        for r in records:
            pids.append(r[0])
            pid_time[r[0]] = 0
        if len(pids) == 0:
            message = "There are no proposal to charge time for %s" % obsdate
            log.message(message)
            return

        # get the nightinfo_id
        night_id = saltmysql.getnightinfoid(sdb, obsdate)
        print night_id

        # get a list of all the images taken during the night
        select_state = "FileName, Proposal_Code, Target_Name, ExposureTime, UTSTART, INSTRUME, OBSMODE, DETMODE, CCDTYPE, NExposures"
        table_state = "FileData Join ProposalCode using (ProposalCode_Id) join FitsHeaderImage using (FileData_Id)"
        logic_state = "FileName like '%" + obsdate + "%'"
        img_list = saltmysql.select(sdb, select_state, table_state, logic_state)

        # get all the blocks visited
        select_state = "Block_Id, Accepted, Proposal_Code"
        table_state = "Block join BlockVisit using (Block_Id) join Proposal using (Proposal_Id) join ProposalCode using (ProposalCode_Id)"
        logic_state = "NightInfo_Id=%i" % night_id
        block_list = saltmysql.select(sdb, select_state, table_state, logic_state)
        print block_list

        # get the start and end of twilight
        nightstart = saltmysql.select(sdb, "EveningTwilightEnd", "NightInfo", "NightInfo_Id=%i" % night_id)[0][0]
        nightend = saltmysql.select(sdb, "MorningTwilightStart", "NightInfo", "NightInfo_Id=%i" % night_id)[0][0]
        print nightstart, nightend, (nightend - nightstart).seconds
        print

        # download the SOMMI log from the night
        try:
            sommi_log = saltmysql.select(sdb, "SONightLog", "NightLogs", "NightInfo_Id=%i" % night_id)[0][0]
        except Exception, e:
            msg = "Unable to read in SOMMI log for %s" % obsdate
            raise SaltError(msg)

        # add to account for error in SOMMI log
        if int(obsdate) < 20111027:
            sommi_log = sommi_log.replace("Object name", "\nObject name")

        # parse the sommi log
        point_list = parseforpointings(sommi_log)

        # now find all blocks observed during a night
        for point in point_list:
            # proposal for that block
            pid = point[0].strip()
            # start time for that block
            starttime = point[1]
            # find the end time for that block
            endtime = findnexttime(point[1], point_list, nightend)

            # find the start time of the first object to be observed
            # for this block
            startimage = findfirstimage(pid, starttime, img_list)
            lastimage = findlastimage(pid, endtime, img_list)

            # check to see if the end time for the last file observed for
            # this block
            if startimage is not None:
                if startimage > endtime:
                    startimage = None

            # determine the acquisition time for the block
            # (-1 flags a pointing with no matching image)
            if startimage is not None:
                acqdelta = (startimage - starttime).seconds
            else:
                acqdelta = -1

            # find the shutter open time
            shuttertime = calculate_shutteropen(img_list, starttime, endtime)

            # if the shutter and time of the last image is substantially different from the
            # end of the block, then take the last image as the end of the block
            # *TODO* Change to use expected block time
            st = starttime + datetime.timedelta(0, shuttertime + acqdelta)
            if (endtime - st).seconds > 900:
                if lastimage is None:
                    endtime = st
                elif (lastimage - st).seconds > 3600:
                    endtime = st
                else:
                    endtime = lastimage

            # account for the time for that block
            tblock = (endtime - starttime).seconds
            if acqdelta > -1:
                # find the associated block
                block_id, blockvisit_id, accepted = getblockid(sdb, night_id, pid, img_list, starttime, endtime)

                # only accepted science ('-3-') blocks are charged
                if accepted and pid.count("-3-"):
                    # charged time for that block
                    try:
                        pid_time[pid] += tblock
                    except KeyError:
                        pid_time[pid] = tblock
                    print block_id, blockvisit_id, pid, starttime, tblock, acqdelta, shuttertime, shuttertime / tblock, block_id, accepted

                    # update the charge time
                    slewdelta = 0
                    scidelta = tblock - acqdelta
                    update_cmd = "TotalSlewTime=%i, TotalAcquisitionTime=%i, TotalScienceTime=%i" % (
                        slewdelta,
                        acqdelta,
                        scidelta,
                    )
                    table_cmd = "BlockVisit"
                    logic_cmd = "BlockVisit_Id=%i" % blockvisit_id
                    saltmysql.update(sdb, update_cmd, table_cmd, logic_cmd)

        return

        # NOTE(review): everything below is unreachable because of the
        # return above, and 'stime' is undefined in this scope -- the
        # summary code would raise NameError if ever re-enabled.
        # update the charged time information
        # TODO: Check to see if the block was approved
        ptime_tot = 0
        stime_tot = 0
        for k in pid_time:
            ptime = pid_time[k]
            if ptime > 0:
                obsratio = stime / ptime
            else:
                obsratio = 0
            print "%25s %5i %5i %3.2f" % (k, ptime, stime, obsratio)
            if k.count("-3-"):
                ptime_tot += ptime
                stime_tot += stime

        # print out the total night statistics
        tdelta = nightend - nightstart
        print tdelta.seconds, ptime_tot, stime_tot
#if detmode=='FT' or detmode=='FRAME TRANSFER': return iminfo #reduce the data if reduce_image: try: quickclean(filename, interp, cleanup, clobber, logfile, verbose) except Exception, e: print e return iminfo #load the data into the SDB if self.sdbhost and self.update: try: log=None #open(logfile, 'a') sdb=saltmysql.connectdb(self.sdbhost, self.sdbname, self.sdbuser, self.password) sdbloadfits(filename, sdb, log, False) print 'SDBLOADFITS: SUCCESS' except Exception, e: print 'SDBLOADFITSERROR:', e #display the image if display_image: print "Displaying %s" % outfile try: display(outfile) except Exception, e: print e #if the images are imaging data, run sextractor on them name=iminfo[0]
def saltpipe(obsdate,pinames,archive,ftp,email,emserver,emuser,empasswd,bcc, qcpcuser,qcpcpasswd, ftpserver,ftpuser,ftppasswd,sdbhost, sdbname, sdbuser, sdbpass, elshost, elsname, elsuser, elspass, median,function,order,rej_lo,rej_hi,niter,interp, clobber, runstatus, logfile,verbose):
    """Run the SALT science pipeline for a night of data.

       Copies the raw RSS, SALTICAM and HRS data for obsdate into a
       temporary working directory, pre-processes and reduces it, loads
       the results into the sdb, builds the observation documentation,
       attaches environmental (ELS) data, and optionally transfers the
       products to the FTP area.
    """
    # set up
    basedir=os.getcwd()
    propcode=pinames
    sender = emuser + '@salt.ac.za'
    recipient = sender
    emessage = ''
    emailfile = '../piemaillist/email.lis'

    # check the observation date is sensible
    if ('/' in obsdate or '20' not in obsdate or len(obsdate) != 8):
        emessage = 'Observation date does not look sensible - YYYYMMDD\n'
        raise SaltError(emessage)

    # stop if the obsdate temporary directory already exists
    obsdir='%s' % obsdate
    if os.path.exists(obsdir):
        emessage += 'The temporary working directory ' + os.getcwd() + '/'
        emessage += obsdate + ' already exists. '
        raise SaltError(emessage)

    # create a temporary working directory and move to it
    saltio.createdir(obsdir)
    saltio.changedir(obsdir)
    workpath = saltio.abspath('.')

    # test the logfile
    logfile = workpath+logfile
    logfile = saltio.logname(logfile)

    #note the starttime
    starttime = time.time()

    #start logging
    with logging(logfile,debug) as log:

        #connect to the database
        sdb=saltmysql.connectdb(sdbhost, sdbname, sdbuser, sdbpass)

        #get the nightinfo id
        nightinfoid=saltmysql.getnightinfoid(sdb, obsdate)

        #Get the list of proposal codes
        state_select='Proposal_Code'
        state_tables='Proposal join ProposalCode using (ProposalCode_Id)'
        state_logic="current=1"
        records=saltmysql.select(sdb, state_select, state_tables, state_logic)
        propids=[k[0] for k in records]

        # Calculate the current date
        currentdate=salttime.currentobsdate()

        # are the arguments defined
        saltio.argdefined('obsdate',obsdate)

        # check email and ftp arguments are consistent
        if email and not ftp:
            message = 'ERROR: SALTPIPE -- cannot send email to PI(s) unless data is transferred '
            message += 'to the FTP server; use ftp=\'yes\' email=\'yes\''
            raise SaltError(message)

        # identify a potential list of keyword edits
        keyfile = '../newheadfiles/list_newhead_' + obsdate
        if not os.path.isfile(keyfile):
            message = '\nSALTPIPE -- keyword edits ' + keyfile + ' not found locally'
            log.message(message)

        # check directories for the raw RSS data
        rssrawpath = makerawdir(obsdate, 'rss')

        # check directories for the raw SALTICAM data
        scmrawpath = makerawdir(obsdate, 'scam')

        # check raw directories for the disk.file record and find last file number
        #check rss data
        lastrssnum = checkfordata(rssrawpath, 'P', obsdate, log)
        #check scam data
        lastscmnum = checkfordata(scmrawpath, 'S', obsdate, log)

        #check for HRS Data--not filedata yet, so cannot check
        if lastrssnum == 1 and lastscmnum == 1:
            message = 'SALTPIPE -- no SALTICAM or RSS data obtained on ' + obsdate
            emessage += '\n' + message + '\n'
            log.message(message)

        #copy the data to the working directory
        if lastrssnum > 1:
            message = 'Copy ' + rssrawpath + ' --> ' + workpath + 'raw/'
            log.message(message)
            saltio.copydir(rssrawpath,'rss/raw')
        if lastscmnum > 1:
            message = 'Copy ' + scmrawpath + ' --> ' + workpath + 'raw/'
            log.message(message)
            saltio.copydir(scmrawpath,'scam/raw')

        #copy and pre-process the HRS data (blue then red arm); any failure
        #is logged and HRS processing is skipped for the night
        try:
            hrsbrawpath = makerawdir(obsdate, 'hbdet')
            saltio.createdir('hrs')
            saltio.createdir('hrs/raw')
            message = 'Copy ' + hrsbrawpath + ' --> ' + workpath + 'raw/'
            log.message(message)
            salthrspreprocess(hrsbrawpath, 'hrs/raw/', clobber=True, log=log, verbose=verbose)

            hrsrrawpath = makerawdir(obsdate, 'hrdet')
            message = 'Copy ' + hrsrrawpath + ' --> ' + workpath + 'raw/'
            log.message(message)
            salthrspreprocess(hrsrrawpath, 'hrs/raw/', clobber=True, log=log, verbose=verbose)

            lasthrsnum=len(glob.glob('hrs/raw/*fits'))
        except Exception,e:
            log.message('Could not copy HRS data because %s' % e)
            lasthrsnum=0

        if lastrssnum>1 or lastscmnum>1:
            message = 'Copy of data is complete'
            log.message(message)
        else:
            message = 'No data was taken on %s' % obsdate
            log.message(message)

        #process the RSS data
        if lastrssnum>1:
            preprocessdata('rss', 'P', obsdate, keyfile, log, logfile, verbose)

        #process the SCAM data
        if lastscmnum>1:
            preprocessdata('scam', 'S', obsdate, keyfile, log, logfile, verbose)

        #process the HRS data
        if lasthrsnum>1:
            preprocessdata('hrs', 'H', obsdate, keyfile, log, logfile, verbose)
            preprocessdata('hrs', 'R', obsdate, keyfile, log, logfile, verbose)

        #check that all data was given a proper proposal id
        #only do it for semesters after the start of science operations
        if int(obsdate)>=20110901:
            # Check to see that the PROPID keyword exists and if not add it
            message = '\nSALTPIPE -- Checking for PROPID keyword'
            log.message(message)

            #check rss data
            rssstatus=runcheckforpropid(glob.glob('rss/raw/P*.fits'), propids, log)
            #check scam data
            scmstatus=runcheckforpropid(glob.glob('scam/raw/S*.fits'), propids, log)
            #check hrsB data
            hrsbstatus=runcheckforpropid(glob.glob('hrs/raw/H*.fits'), propids, log)
            #check hrsR data
            hrsrstatus=runcheckforpropid(glob.glob('hrs/raw/R*.fits'), propids, log)

            if not rssstatus or not scmstatus or not hrsbstatus or not hrsrstatus:
                msg='The PROPIDs for these files needs to be updated and re-start the pipeline'
                raise SaltError("Invalid PROPID in images:"+msg)

        #process the RSS data
        rssrawsize, rssrawnum, rssprodsize, rssprodnum=processdata('rss', obsdate, propcode, median, function, order, rej_lo, rej_hi, niter, interp,logfile, verbose)

        #advance process the data
        if rssrawnum > 0:
            advanceprocess('rss', obsdate, propcode, median, function, order, rej_lo, rej_hi, niter, interp,sdbhost, sdbname, sdbuser, sdbpass, logfile, verbose)

        #process the SCAM data
        scmrawsize, scmrawnum, scmprodsize, scmprodnum=processdata('scam', obsdate, propcode, median, function, order, rej_lo, rej_hi, niter, interp,logfile, verbose)

        #process the HRS data
        hrsrawsize, hrsrawnum, hrsprodsize, hrsprodnum=hrsprocess('hrs', obsdate, propcode, median, function, order, rej_lo, rej_hi, niter, interp, logfile, verbose)

        #upload the data to the database
        img_list=glob.glob(workpath+'scam/product/*bxgp*.fits')
        img_list.extend(glob.glob(workpath+'rss/product/*bxgp*.fits'))
        img_list.extend(glob.glob(workpath+'hrs/raw/*.fits'))
        if img_list:
            img=','.join('%s' % (k) for k in img_list)
            saltsdbloadfits(images=img, sdbname=sdbname, sdbhost=sdbhost, sdbuser=sdbuser, \
                 password=sdbpass, logfile=logfile, verbose=verbose)

        #add junk sources to the database
        raw_list=glob.glob(workpath+'scam/raw/S*.fits')
        raw_list.extend(glob.glob(workpath+'rss/raw/P*.fits'))
        if raw_list:
            img=''
            for img in raw_list:
                hdu=pyfits.open(img)
                if hdu[0].header['PROPID'].strip()=='JUNK':
                    saltsdbloadfits(images=img, sdbname=sdbname, sdbhost=sdbhost, sdbuser=sdbuser, \
                         password=sdbpass, logfile=logfile, verbose=verbose)
                hdu.close()

        # construct observation and pipeline documentation
        if lastrssnum > 1 and rssrawnum>0:
            rssobslog = 'rss/product/P' + obsdate + 'OBSLOG.fits'
        else:
            rssobslog = 'none'
        if lastscmnum > 1 and scmrawnum>0:
            scmobslog = 'scam/product/S' + obsdate + 'OBSLOG.fits'
        else:
            scmobslog = 'None'
        if lasthrsnum > 1 and hrsrawnum>0:
            hrsobslog = 'hrs/product/H' + obsdate + 'OBSLOG.fits'
        else:
            hrsobslog = 'None'

        # no usable data at all: suppress email and ftp delivery
        if rssrawnum==0 and scmrawnum==0 and hrsrawnum==0:
            msg='No data processed for %s' % obsdate
            email=False
            ftp=False
            log.message(msg)

        htmlpath = '.'
        nightlog = '../nightlogfiles/' + obsdate + '.log'
        readme = iraf.osfn('pipetools$html/readme.template')
        if not os.path.isfile(nightlog):
            nightlog = ''
            message = 'No night log file ~/nightlogfiles/' + obsdate + '.log found'
            log.warning(message)

        if (rssrawnum > 0 or scmrawnum > 0 or hrsrawnum>0):
            salthtml(propcode=propcode,scamobslog=scmobslog,rssobslog=rssobslog, hrsobslog=hrsobslog, htmlpath=htmlpath, nightlog=nightlog,readme=readme,clobber=True,logfile=logfile, verbose=verbose)

        #add a pause to allow syncing of the databases
        time.sleep(10)

        #Add in the environmental information
        if (rssrawnum > 0 or scmrawnum > 0 or hrsrawnum>0):
            propids=saltmysql.getpropcodes(sdb, obsdate)
            for pid in propids:
                saltelsdata(pid, obsdate, elshost, elsname, elsuser, elspass, sdbhost,sdbname,sdbuser, sdbpass, clobber, logfile,verbose)
                try:
                    outfile='%s_%s_elsdata.fits' % (pid, obsdate)
                    outdir='%s/doc/' % (pid)
                    shutil.move(outfile, outdir)
                except:
                    # could not file the ELS data with the proposal --
                    # remove the orphaned file rather than leave it behind
                    os.remove(outfile)

        #ftp the data
        beachdir='/salt/ftparea/'
        if ftp:
            try:
                saltftp(propcode=propcode,obsdate=obsdate, datapath=workpath, password=ftppasswd,beachdir=beachdir,sdbhost=sdbhost, sdbname=sdbname,sdbuser=sdbuser,splitfiles=False, cleanup=True,clobber=True,logfile=logfile, verbose=verbose)
            except Exception,e:
                message="Not able to copy data to FTP area:\n%s " % e
                raise SaltError(message)

            #run with the splitting of files
            try:
                saltftp(propcode=propcode,obsdate=obsdate, datapath=workpath, password=ftppasswd,beachdir=beachdir,sdbhost=sdbhost, sdbname=sdbname,sdbuser=sdbuser,splitfiles=True, cleanup=True,clobber=True,logfile=logfile, verbose=verbose)
            except Exception,e:
                message="Not able to copy data to FTP area:\n%s " % e
                raise SaltError(message)
# NOTE(review): fragment -- interior of an email-distribution function whose
# 'def' and opening 'try:' are outside this chunk; smtp, server, username,
# password, log, obsdate, pids and propids come from the missing scope.
    smtp.connect(server)
    smtp.ehlo()
    smtp.starttls()
    smtp.ehlo()
except Exception, e:
    message = 'Cannot connect to %s because %s' % (server, e)
    log.error(message)

# authenticate against the mail server
try:
    smtp.login(username,password)
except Exception, e:
    message = 'Cannot login to %s as %s because %s' % (server, username, e)
    log.error(message)

# log into the mysql data base and download a list of proposal codes
sdb=saltmysql.connectdb(sdbhost, sdbname, sdbuser, password)
select='distinct Proposal_Code'
table='FileData join ProposalCode using (ProposalCode_Id)'
logic="FileName like '%" + obsdate + "%'"
records=saltmysql.select(sdb, select, table, logic)
if len(records)<1:
    msg="No observations available for %s" % obsdate
    log.warning(msg)
    return
else:
    for p in records:
        propids.append(p[0])

#clean the proposal list
print pids, propids
def saltslewstats(startdate, enddate, elshost, elsname, elsuser, elspass, sdbhost,sdbname,sdbuser, password, clobber,logfile,verbose):
    """Compute and plot nightly telescope slew-time statistics between two dates.

    For each night in [startdate, enddate) the evening/morning twilight times
    are read from the science database (sdb) and the total slew time and number
    of slews are measured from the ELS database.  Running medians are printed
    and the per-slew time is plotted against date.

    startdate/enddate -- observation dates in YYYYMMDD (int-convertible)
    elshost, elsname, elsuser, elspass -- ELS database connection details
    sdbhost, sdbname, sdbuser, password -- science database connection details
    clobber, logfile, verbose -- standard pipeline task arguments
    """
    # set up
    proposers = []
    propids = []
    pids = []

    with logging(logfile,debug) as log:

        #open the database
        els=saltmysql.connectdb(elshost, elsname, elsuser, elspass)
        sdb=saltmysql.connectdb(sdbhost,sdbname,sdbuser, password)

        #loop through each night and determine the statistics for the slew times
        #for each night
        obsdate=int(startdate)
        slew_list=[]
        while obsdate < int(enddate):
            night_id=saltmysql.getnightinfoid(sdb, obsdate)
            start,end=saltmysql.select(sdb,'EveningTwilightEnd,MorningTwilightStart','NightInfo', 'NightInfo_Id=%i' % night_id)[0]
            tslew, nslew=slewtime(els, start, end)
            # only keep nights where at least one slew occurred
            if nslew>0:
                print obsdate, night_id, start, end, nslew, tslew/nslew
                slew_list.append([start, nslew, tslew])
            obsdate=int(getnextdate(obsdate))

        # columns of slew_arr: [night start (datetime), nslew, tslew]
        slew_arr=np.array(slew_list)
        days=np.zeros(len(slew_arr))
        for i in range(len(slew_arr)):
            days[i]=(slew_arr[i,0]-slew_arr[0,0]).days
        # NOTE(review): coef is computed but never used below (the polyval
        # plot is commented out) -- candidate for removal, confirm first
        coef=np.polyfit(days, slew_arr[:,2]/slew_arr[:,1], 2)

        # running medians over windows of 2*nstep nights
        ave_date=[]
        ave_values=[]
        ave_nslews=[]
        nstep=10
        for i in np.arange(15,len(slew_arr), 2*nstep):
            ave_date.append(slew_arr[i,0])
            i1=i-nstep
            i2=min(i+nstep, len(slew_arr))
            #ave_values.append(np.median(slew_arr[i1:i2,2]/slew_arr[i1:i2,1]))
            ave_nslews.append(np.median(slew_arr[i1:i2,1]))
            ave_values.append(np.median(slew_arr[i1:i2,2]))
            print ave_date[-1], ave_values[-1], ave_nslews[-1]
        ave_values=np.array(ave_values)
        ave_nslews=np.array(ave_nslews)
        mean_slew=ave_nslews.mean()
        # NOTE(review): the computed mean is immediately overridden with a
        # hard-coded value of 11 slews/night -- presumably a calibration
        # constant chosen by hand; confirm before changing
        mean_slew=11
        for i in range(len(ave_date)):
            #value is an attempt to correct for the average number of slews
            value=(ave_values[i]+30*(mean_slew-ave_nslews[i]))/mean_slew
            print ave_date[i], '%i %i %i' % (ave_values[i]/ave_nslews[i], ave_nslews[i], value)

        # plot per-slew time for each night plus the running-median curve
        plt.figure()
        plt.plot(slew_arr[:,0], slew_arr[:,2]/slew_arr[:,1], ls='', marker='o')
        #plt.plot(slew_arr[:,0], slew_arr[:,1], ls='', marker='o')
        #plt.plot(slew_arr[:,0], slew_arr[:,2], ls='', marker='o')
        plt.plot(ave_date, ave_values/ave_nslews)
        #plt.plot(days, np.polyval(coef, days))
        plt.ylabel('Slew Time(s)')
        plt.xlabel('Date')
        plt.show()
def findcal(obsdate, sdbhost, sdbname, sdbuser, password):
    """Find all of the unique instrument configurations used on a night and
    insert the corresponding nightly-calibration requests into the database.

    Handles SALTICAM ('S' files) and RSS ('P' files) OBJECT frames taken on
    obsdate; for each unique configuration not already recorded it looks up
    the calibrations requested for the block and inserts rows into
    SalticamNightlyCalibration / RssNightlyCalibration as appropriate.

    obsdate--Observation date in YYYYMMDD
    sdbhost--host name for sdb
    sdbname--name of the sdb
    sdbuser--user for the sdb
    password--sdb password
    """
    #connect to the db
    sdb=saltmysql.connectdb(sdbhost, sdbname, sdbuser, password)

    #get the nightinfo id
    logic="Date='%4s-%2s-%2s'" % (obsdate[0:4], obsdate[4:6], obsdate[6:8])
    results=saltmysql.select(sdb, 'NightInfo_Id', 'NightInfo', logic)
    night_id=results[0][0]

    #select all the scam data from this obsdate
    cmd_select='d.FileName, d.FileData_Id, f.CCDTYPE, d.DETMODE, CCDSUM, GAINSET, ROSPEED, FILTER'
    cmd_table=''' FileData as d left join FitsHeaderImage as f using (FileData_Id) left join FitsHeaderSalticam using (FileData_Id) '''
    cmd_logic="d.FileName like 'S" + obsdate+"%' and f.CCDTYPE='OBJECT'"
    results=saltmysql.select(sdb, cmd_select, cmd_table, cmd_logic)
    #loop through all the results and return only the Set of identical results
    caldict=create_caldict(results)

    #insert the results into the database
    for k in caldict:
        #first check to see if it has already been entered
        record=saltmysql.select(sdb, 'FileData_Id', 'SalticamNightlyCalibration', 'FileData_Id=%i' % k)
        if len(record)<1:
            #check for block_id
            blockid=saltmysql.select(sdb, 'Block_Id', 'FileData', 'FileData_Id=%i' % k)[0][0]
            #get the calibration types requested
            if blockid:
                # NOTE(review): 'SalitcamCalibration' and
                # 'SatlicamCalibrationType_Id' look misspelled but may match
                # the actual schema -- verify against the database before fixing
                request=saltmysql.select(sdb, 'SalticamCalibrationType_Id', 'SalitcamCalibration', 'Block_Id=%i' % blockid)
                for cid in request:
                    cid=cid[0]
                    cmd_insert='NightInfo_Id=%i, FileData_Id=%i, SatlicamCalibrationType_Id=%i' % (night_id, k, cid)
                    # insert currently disabled -- only prints what would be inserted
                    #saltmysql.insert(sdb, cmd_insert, 'SalitcamNightlyCalibration')
                    print(k, " ".join([str(k) for k in caldict[k]]))

    #list of rss calibration types
    #+-----------------------+----------------------------------+
    #| RssCalibrationType_Id | CalibrationType                  |
    #+-----------------------+----------------------------------+
    #|                     2 | Arc                              |
    #|                     1 | Bias                             |
    #|                    14 | Imaging flat - Lamp              |
    #|                    15 | Imaging flat - Twilight          |
    #|                     3 | Spectroscopic flat - Lamp        |
    #|                     4 | Spectroscopic flat - Twilight    |
    #|                    12 | Standard - Circular polarimetric |
    #|                    13 | Standard - Lick                  |
    #|                     9 | Standard - Linear polarimetric   |
    #|                     5 | Standard - Photometric           |
    #|                     8 | Standard - RV                    |
    #|                    11 | Standard - Smooth spectrum       |
    #|                     7 | Standard - Spectrophotometric    |
    #|                     6 | Standard - Spectroscopic         |
    #|                    10 | Standard - Unpolarised           |
    #+-----------------------+----------------------------------+

    #select all the RSS data from this obsdate
    rssheaderlist='f.CCDTYPE, d.DETMODE, d.OBSMODE, CCDSUM, GAINSET, ROSPEED, FILTER, GRATING, GR_STA, AR_STA, MASKID'
    cmd_select='d.FileName,d.FileData_Id, %s' % rssheaderlist
    cmd_table=''' FileData as d left join FitsHeaderImage as f using (FileData_Id) left join FitsHeaderRss using (FileData_Id) join ProposalCode using (ProposalCode_Id) '''
    cmd_logic="d.FileName like 'P" + obsdate+"%' and CCDTYPE='OBJECT' and Proposal_Code not like 'CAL_SPST'"
    results=saltmysql.select(sdb, cmd_select, cmd_table, cmd_logic)
    #loop through all the results and return only the Set of identical results
    caldict=create_caldict(results)

    #insert the rss results into the database
    for k in caldict:
        #first check to see if it has already been entered
        record=saltmysql.select(sdb, 'FileData_Id', 'RssNightlyCalibration', 'FileData_Id=%i' % k)
        if len(record)<1:
            #period for checking for SPST.  If the uses requests this, it gets set to
            # 7 days, but the default is taken within the last month for non-requests
            period=30
            #check for block_id
            blockid=saltmysql.select(sdb, 'Block_Id', 'FileData', 'FileData_Id=%i' % k)[0][0]
            #get the calibration types requested
            if blockid:
                request=saltmysql.select(sdb, 'RssCalibrationType_Id', 'RssCalibration', 'Block_Id=%i' % blockid)
                for cid in request:
                    cid=cid[0]
                    #check to see if the request is already in the database or if a similar request has
                    #been taken recently
                    if cid==1: #bias
                        if not checkforbias(sdb, k):
                            cmd_insert='NightInfo_Id=%i, FileData_Id=%i, RssCalibrationType_Id=%i' % (night_id, k, cid)
                            saltmysql.insert(sdb, cmd_insert, 'RssNightlyCalibration')
                    elif cid in [3,4]: #flats
                        if not checkforflats(sdb, k, cid, rssheaderlist, instr='rss', keylist=caldict[k], period=90):
                            cmd_insert='NightInfo_Id=%i, FileData_Id=%i, RssCalibrationType_Id=%i' % (night_id, k, cid)
                            saltmysql.insert(sdb, cmd_insert, 'RssNightlyCalibration')
                    elif cid==7: #specstand
                        period=7
                        #print period, k, caldict[k]
                        if not checkforspst(sdb, k, caldict[k], rssheaderlist, period=period):
                            cmd_insert='NightInfo_Id=%i, FileData_Id=%i, RssCalibrationType_Id=7' % (night_id, k)
                            saltmysql.insert(sdb, cmd_insert, 'RssNightlyCalibration')
                    else:
                        # any other requested calibration type is inserted unconditionally
                        cmd_insert='NightInfo_Id=%i, FileData_Id=%i, RssCalibrationType_Id=%i' % (night_id, k, cid)
                        saltmysql.insert(sdb, cmd_insert, 'RssNightlyCalibration')
                    print(k, cid," ".join([str(w) for w in caldict[k]]))
    return
def runfast(filename, propcode, obsdate, server, readmefile, sdbhost, sdbname, sdbuser, password):
    """Handle fast data delivery for a proposal for a single file.

    If the proposal has DataAccessMethod='Fast' in the sdb, the raw and
    reduced products for `filename` are scp'd to the contact PI's FTP area on
    saltpipe under fast<obsdate>/, and a notification email is sent once per
    night (tracked via PipelineProposalStatistics).

    filename -- raw FITS file name ('P...' for RSS, 'S...' for SALTICAM)
    propcode -- proposal code; no-op when None/'None'
    obsdate -- observation date in YYYYMMDD
    server -- email server used by sendemail
    readmefile -- template for the notification message (OBSDATE is substituted)
    sdbhost, sdbname, sdbuser, password -- science database connection details
    """
    if propcode is None or propcode=='None':
        return

    #first check in the sdb if fast data delivery is needed
    sdb=saltmysql.connectdb(sdbhost,sdbname, sdbuser, password)
    select_term='Distinct Surname, email, username, ProposalCode_Id'
    from_term=''' Block join Pointing using (Block_Id) join PipelineConfig using (Pointing_Id) join Proposal using (Proposal_Id) join ProposalCode using (ProposalCode_Id) join PipelineDataAccessMethod using (PipelineDataAccessMethod_Id) join ProposalContact using (Proposal_Id) join Investigator on (Investigator_Id=Contact_Id) join PiptUser using (PiptUser_Id) '''
    where_term="Proposal_Code like '%s' and current=1 and DataAccessMethod='Fast'" % (propcode)
    #print 'Select %s from %s where %s' % (select_term, from_term, where_term)
    try:
        record=saltmysql.select(sdb, select_term, from_term, where_term)
    except Exception as e:
        print(e)
        return None
    #print "Checking for fast data"
    #print record
    if record:
        surname, email, username, propid= record[0]
        #print surname, email, username, propid
    else:
        # proposal is not configured for fast delivery
        return

    #second if so, then copy the data to the contact PI directory
    #on saltpipe under the fast directory.
    #rawfilename=getrawfilename(filename)
    y=os.system('scp %s sa@saltpipe:/salt/ftparea/%s/fast%s/' % (filename, username, obsdate))
    # exit status 256 -- presumably the target directory does not exist yet;
    # create it and retry once
    if y==256:
        y=os.system('ssh sa@saltpipe mkdir /salt/ftparea/%s/fast%s' % (username, obsdate))
        y=os.system('scp %s sa@saltpipe:/salt/ftparea/%s/fast%s/' % (filename, username, obsdate))
    if y!=0:
        print("Problem with copying file %s to /salt/ftparea/%s/fast%s/" % (filename, username, obsdate))
    #copy the reduced data
    y=os.system('scp mbxp%s sa@saltpipe:/salt/ftparea/%s/fast%s/' % (os.path.basename(filename), username, obsdate))

    #check the type of data it is and copy over an ancillery data as well
    #if it is the first object file, check to see if an email has been
    #sent, and if not, send email

    #try to copy the spectroscopic data
    print(filename, filename.startswith('P'))
    if os.path.basename(filename).startswith('P'):
        # RSS: also copy the extracted spectrum text file
        sfilename='smbxp%s.txt' % (os.path.basename(filename).split('.fits')[0])
        print(sfilename)
        try:
            y=os.system('scp %s sa@saltpipe:/salt/ftparea/%s/fast%s/' % (sfilename, username, obsdate))
        except Exception as e:
            print(e)
    if os.path.basename(filename).startswith('S'):
        # SALTICAM: also copy the source catalog
        try:
            sfilename='mbxp%s.cat' % (os.path.basename(filename).split('.fits')[0])
            print(sfilename)
            y=os.system('scp %s sa@saltpipe:/salt/ftparea/%s/fast%s/' % (sfilename, username, obsdate))
        except Exception as e:
            print(e)

    #check to see if an email has been sent
    select_term='PipelineStatus'
    from_term=''' PipelineProposalStatistics join PipelineStatus using (PipelineStatus_Id) join NightInfo using (NightInfo_Id) join ProposalCode using (ProposalCode_Id) '''
    where_term="Proposal_Code like '%s' and Date='%s-%s-%s'" % (propcode, obsdate[0:4], obsdate[4:6], obsdate[6:8])
    print(select_term, from_term, where_term)
    try:
        record=saltmysql.select(sdb, select_term, from_term, where_term)[0][0]
    except:
        # no status row yet -- treat as "email not sent"
        record=None
    print(record)
    if record=='FastEmail':
        # email already sent for this proposal tonight
        return
    else:
        #insert information into the database
        nightinfoid=saltmysql.getnightinfoid(sdb, obsdate)
        # PipelineStatus_Id=8 marks the fast-email-sent state -- confirm against
        # the PipelineStatus table
        insert_term="NightInfo_Id=%i, ProposalCode_Id=%i, PipelineStatus_Id=8" % (nightinfoid, propid)
        table_term="PipelineProposalStatistics"
        saltmysql.insert(sdb, insert_term, "PipelineProposalStatistics")
        #send email
        sender='*****@*****.**'
        recipient=email
        bcc='*****@*****.**'
        subject='SALT data available for %s' % propcode
        message=open(readmefile).read()
        message=message.replace('OBSDATE', obsdate)
        sendemail(server,'sa',password,sender,recipient,bcc, subject,message)

    sdb.close()
    return
def saltcalibrations( propcode, outfile=None, sdbhost="sdb.saao", sdbname="sdb", sdbuser="", password="", clobber=False, logfile="saltlog.log", verbose=True, ): """Seach the salt database for FITS files """ with logging(logfile, debug) as log: # check the outfiles if not saltio.checkfornone(outfile): outfile = None # check that the output file can be deleted if outfile: saltio.overwrite(outfile, clobber) fout = open(oufile, "w") # connect to the database sdb = saltmysql.connectdb(sdbhost, sdbname, sdbuser, password) # determine the associated username = saltmysql.getpiptusername(sdb, propcode) userdir = "/salt/ftparea/%s/spst" % username if not os.path.exists(userdir): saltio.createdir(userdir) log.message("Will copy data to %s" % userdir) # Find all the data assocated with a proposal cmd_select = "d.FileName,d.FileData_Id, CCDTYPE, d.DETMODE, d.OBSMODE, CCDSUM, GAINSET, ROSPEED, FILTER, GRATING, GRTILT, CAMANG, MASKID" cmd_table = """ FileData as d left join FitsHeaderImage using (FileData_Id) left join FitsHeaderRss using (FileData_Id) left join ProposalCode using (ProposalCode_Id) """ cmd_logic = 'Proposal_Code="%s" and CCDTYPE="OBJECT" and d.OBSMODE="SPECTROSCOPY"' % (propcode) record = saltmysql.select(sdb, cmd_select, cmd_table, cmd_logic) # loop through all the results and return only the Set of identical results caldict = create_caldict(record) # prepare for writing out the results outstr = "" if outfile: fout.write(outstr + "\n") else: print outstr # now find all the cal_spst that have the same settings cmd_select = "d.FileName,d.FileData_Id, CCDTYPE, d.DETMODE, d.OBSMODE, CCDSUM, GAINSET, ROSPEED, FILTER, GRATING, GRTILT, CAMANG, MASKID" cmd_table = """ FileData as d left join FitsHeaderImage using (FileData_Id) left join FitsHeaderRss using (FileData_Id) left join ProposalCode using (ProposalCode_Id) """ for r in caldict: cmd_logic = "CCDSUM='%s' and GRATING='%s' and GRTILT='%s' and CAMANG='%s' and Proposal_Code='CAL_SPST'" % ( caldict[r][3], 
caldict[r][7], caldict[r][8], caldict[r][9], ) # cmd_logic="CCDSUM='%s' and GRATING='%s' and AR_STA='%s' " % (caldict[r][3], caldict[r][7], caldict[r][9]) log.message(cmd_logic, with_header=False) record = saltmysql.select(sdb, cmd_select, cmd_table, cmd_logic) # print record # write out hte results for r in record: outstr = " ".join(["%s" % x for x in r]) if outfile: fout.write(outstr + "\n") else: log.message(outstr, with_header=False) # copy to the user directory cfile = makefilename(r[0], state="product") shutil.copy(cfile, userdir) cfile = makefilename(r[0], state="raw") shutil.copy(cfile, userdir) # close outfile if outfile: fout.close()
def hrscalibrations(
    obsdate,
    sdbhost="sdb.saao",
    sdbname="sdb",
    sdbuser="",
    password="",
    clobber=False,
    logfile="saltlog.log",
    verbose=True,
):
    """Sort HRS calibration frames taken on a night into the HRS_Cals tree.

    All HRS files ('H'/'R' prefixes) for `obsdate` whose proposal code
    contains 'CAL' are symlinked into
    /salt/HRS_Cals/<propid>/<yyyy>/<mmdd>/{raw,product}/ and a per-proposal
    log of the files is written alongside a symlink to the night log.

    obsdate -- observation date in YYYYMMDD (int or str)
    sdbhost, sdbname, sdbuser, password -- science database connection details
    clobber -- passed to saltio.symlink to overwrite existing links
    logfile, verbose -- standard pipeline logging arguments
    """
    with logging(logfile, debug) as log:
        # make obsdate a string if needed
        obsdate = str(obsdate)
        log.message("Sorting HRS calibration data")

        # connect to the database
        sdb = saltmysql.connectdb(sdbhost, sdbname, sdbuser, password)

        # first select propcodes for all HRS data from that day and check for any CAL data
        table = "FileData join ProposalCode using (ProposalCode_Id)"
        logic = "FileName like 'H" + obsdate + "%' or FileName like 'R" + obsdate + "%'"
        records = saltmysql.select(sdb, "FileName, Proposal_Code", table, logic)

        # exit if no data was taken for HRS
        if len(records) == 0:
            return

        # Loop through each of the files and create the directories if needed
        image_dict = {}
        for filename, propid in records:
            if propid.count("CAL"):
                # check for directory and create structure
                caldir = "/salt/HRS_Cals/%s/" % propid
                if not os.path.isdir(caldir):
                    os.mkdir(caldir)
                yeardir = "%s%s/" % (caldir, obsdate[0:4])
                if not os.path.isdir(yeardir):
                    os.mkdir(yeardir)
                daydir = "%s%s/%s/" % (caldir, obsdate[0:4], obsdate[4:8])
                if not os.path.isdir(daydir):
                    os.mkdir(daydir)
                rawdir = "%s%s/%s/raw/" % (caldir, obsdate[0:4], obsdate[4:8])
                if not os.path.isdir(rawdir):
                    os.mkdir(rawdir)
                prodir = "%s%s/%s/product/" % (caldir, obsdate[0:4], obsdate[4:8])
                if not os.path.isdir(prodir):
                    os.mkdir(prodir)

                # create the symlinks to the raw and product files
                infile = "/salt/data/%s/%s/hrs/raw/%s" % (obsdate[0:4], obsdate[4:8], filename)
                link = "%s%s" % (rawdir, filename)
                saltio.symlink(infile, link, clobber)

                infile = "/salt/data/%s/%s/hrs/product/mbgph%s" % (obsdate[0:4], obsdate[4:8], filename)
                link = "%smbgph%s" % (prodir, filename)
                saltio.symlink(infile, link, clobber)

                log.message(
                    "Copied %s to the HRS_CAL/%s directory" % (filename, propid), with_header=False, with_stdout=verbose
                )

                # group file info by proposal
                # BUGFIX: was try/bare-except around image_dict[propid].append,
                # which silently swallowed any error from get_image_info or the
                # append; setdefault expresses the grouping directly
                image_info = get_image_info(sdb, filename)
                image_dict.setdefault(propid, []).append([image_info])

        # create log of each file--currently not enough information
        # in database to do this
        nightlog = "/salt/logs/sanightlogs/%s.log" % obsdate
        for k in image_dict:
            # link the night log into the proposal's day directory
            nightlink = "/salt/HRS_Cals/%s/%s/%s/%s.log" % (k, obsdate[0:4], obsdate[4:8], obsdate)
            saltio.symlink(nightlog, nightlink, clobber)
            # write one whitespace-padded row per image to the proposal log
            fout = open("/salt/HRS_Cals/%s/%s/%s/%s.log" % (k, obsdate[0:4], obsdate[4:8], k), "w")
            for imlist in image_dict[k]:
                for info in imlist:
                    for f in info:
                        fout.write("%20s " % str(f))
                    fout.write("\n")
            fout.close()
def cleandata(self, filename, iminfo=None, prodir='.', interp='linear', cleanup=True, clobber=False, logfile='saltclean.log', reduce_image=True, display_image=False, verbose=True):
    """Start the process to reduce the data and produce a single mosaicked image.

    Runs quickclean on the raw frame, optionally loads it into the sdb and
    displays it, then branches by observation mode: photometry + background +
    seeing statistics for imaging data, quickspec/wavelength calibration for
    spectroscopy, and fpcal for Fabry-Perot arcs.  Finally triggers fast-mode
    data delivery when enabled.

    filename -- path to the raw frame ('B'=BCAM ignored, 'H'/'R'=HRS copied as-is)
    iminfo -- mutable list of header values indexed via headerList; updated in
              place with BMEAN/BMIDPT/BSTD/NSOURCES/SEEING and returned
    prodir, interp, cleanup, clobber, logfile -- reduction options
    reduce_image -- run quickclean when True
    display_image -- display the mosaicked product when True
    verbose -- verbose logging
    Returns iminfo (possibly updated).
    """
    #print filename
    status=0
    #create the input file name
    infile=os.path.basename(filename)
    rawpath=os.path.dirname(filename)
    outpath='./'
    outfile=outpath+'mbxp'+infile
    #print infile, rawpath, outpath

    #If it is a bin file, pre-process the data
    if filename.count('.bin'):
        print("I can't handle this yet")

    #ignore bcam files
    if infile.startswith('B'):
        return iminfo

    #check to see if it exists and return if clobber is no
    if os.path.isfile(outfile) and not clobber:
        return iminfo

    #handle HRS data -- just copy through, no mosaicking
    print(filename)
    if infile.startswith('H') or infile.startswith('R'):
        shutil.copy(filename, outfile)
        return iminfo

    if filename.count('.txt'):
        return iminfo

    #remove frame transfer data
    #detmode=iminfo[headerList.index('DETMODE')].strip().upper()
    #if detmode=='FT' or detmode=='FRAME TRANSFER': return iminfo

    #reduce the data
    if reduce_image:
        try:
            quickclean(filename, interp, cleanup, clobber, logfile, verbose)
        except Exception as e:
            print(e)
            return iminfo

    #load the data into the SDB
    if self.sdbhost and self.update:
        try:
            log=None #open(logfile, 'a')
            sdb=saltmysql.connectdb(self.sdbhost, self.sdbname, self.sdbuser, self.password)
            sdbloadfits(filename, sdb, log, False)
            print('SDBLOADFITS: SUCCESS')
        except Exception as e:
            # best-effort: sdb load failure must not stop the reduction
            print('SDBLOADFITSERROR:', e)

    #display the image
    if display_image:
        print("Displaying %s" % outfile)
        try:
            display(outfile)
        except Exception as e:
            print(e)

    #if the images are imaging data, run sextractor on them
    name=iminfo[0]
    propcode=iminfo[headerList.index('PROPID')].strip().upper()
    obsmode=iminfo[headerList.index('OBSMODE')].strip().upper()
    detmode=iminfo[headerList.index('DETMODE')].strip().upper()
    obstype=iminfo[headerList.index('CCDTYPE')].strip().upper()
    target=iminfo[headerList.index('OBJECT')].strip().upper()
    lampid=iminfo[headerList.index('LAMPID')].strip().upper()
    print(detmode)
    if (obsmode=='IMAGING' or obsmode=='FABRY-PEROT' ) and (detmode=='NORMAL' or detmode=='FT' or detmode=='FRAME TRANSFER'):
        i=headerList.index('CCDSUM')
        ccdbin=int(iminfo[i].split()[0])
        # 0.14 arcsec per unbinned pixel; 1.5 arcsec photometry aperture
        pix_scale=0.14*ccdbin
        r_ap=1.5/pix_scale

        #measure the photometry
        print("RUNNING PHOTOMETRY")
        quickphot(outfile, r_ap, pix_scale, self.sexfile, clobber, logfile, verbose)

        #load the regions
        #if display_image: regions(outfile)

        #measure the background statistics
        #hdu=pyfits.open(outfile)
        #bmean, bmidpt, bstd=saltstat.iterstat(hdu[1].data, 5, 3)
        # background statistics are currently disabled -- placeholders only
        bmean, bmidpt, bstd=(-1,-1,-1)
        #hdu.close()
        print("---------Background Statistics---------")
        print("%10s %10s %10s" % ('Mean', 'MidPoint', 'STD'))
        print("%10.2f %10.2f %10.2f" % (bmean, bmidpt, bstd))
        iminfo[headerList.index('BMEAN')]='%f' % (bmean)
        iminfo[headerList.index('BMIDPT')]='%f' % (bmidpt)
        iminfo[headerList.index('BSTD')]='%f' % (bstd)

        #measure the seeing from the sextractor catalog
        outtxt=outfile.replace('fits', 'cat')
        try:
            mag_arr, fwhm_arr=np.loadtxt(outtxt, usecols=(2,10), unpack=True)
            mean, std, norm, peak=seeing_stats(fwhm_arr)
            see=mean*pix_scale
            nsources=len(mag_arr)
        except:
            # catalog missing or unreadable -- flag with -1
            see=-1
            nsources=-1
        iminfo[headerList.index('NSOURCES')]='%i' % nsources
        iminfo[headerList.index('SEEING')]='%f' % see
        #self.emit(QtCore.SIGNAL("updateimlist(str,str,str)"), (name, 'SEEING', '%f' % see))
        #self.emit(QtCore.SIGNAL("updatespec(QString)"), name)

    #If the images are spectral images, run specreduce on them
    if obsmode=='SPECTROSCOPY': # and not(target in ['FLAT', 'BIAS']):
        y1,y2=quickspec(outfile, lampid, objsection=self.objsection, findobj=True, clobber=True, logfile=logfile, verbose=verbose)
        print(y1,y2)
        specfile=outpath+'smbxp'+infile.split('.fits')[0]+'.txt'
        #In here, so it doesn't break when the first checkdata runs
        try:
            self.specTab.updaterange(y1,y2)
            self.emit(QtCore.SIGNAL("updatespec(QString)"), infile)
        except Exception as e:
            message="SALTFIRST--ERROR: Could not wavelength calibrate %s because %s" % (infile, e)
            fout=open(logfile, 'a')
            fout.write(message)
            print(message)

    # Fabry-Perot arc frames get a ring calibration
    if obsmode=='FABRY-PEROT' and obstype=='ARC':
        try:
            flatimage='/home/ccd/smc/FPFLAT.fits'
            profile=os.path.basename(outfile)
            fpcal(profile, flatimage=flatimage, minflat=18000, niter=5, bthresh=5, displayimage=True, clobber=True, logfile=logfile, verbose=verbose)
        except Exception as e:
            message="SALTFIRST--ERROR: Could not calibrate FP data te %s because %s" % (infile, e)
            fout=open(logfile, 'a')
            fout.write(message)
            print(message)

    #check for fast mode operation
    if self.update:
        runfast(name, propcode,self.obsdate,self.server, self.readme, self.sdbhost,self.sdbname, self.sdbuser, self.password)

    return iminfo
readme = iraf.osfn('pipetools$html/readme.template') if not os.path.isfile(nightlog): nightlog = '' message = 'No night log {} found'.format(nightlog) log.warning(message) if (rssrawnum > 0 or scmrawnum > 0 or hrsrawnum>0): salthtml(propcode=propcode,scamobslog=scmobslog,rssobslog=rssobslog, hrsobslog=hrsobslog, htmlpath=htmlpath, nightlog=nightlog,readme=readme,clobber=True,logfile=logfile, verbose=verbose) #add a pause to allow syncing of the databases time.sleep(10) #Add in the environmental information sdb=saltmysql.connectdb(sdbhost, sdbname, sdbuser, sdbpass) if (rssrawnum > 0 or scmrawnum > 0 or hrsrawnum>0): propids=saltmysql.getpropcodes(sdb, obsdate) for pid in propids: try: saltelsdata(pid, obsdate, elshost, elsname, elsuser, elspass, sdbhost,sdbname,sdbuser, sdbpass, clobber, logfile,verbose) except: continue try: outfile='%s_%s_elsdata.fits' % (pid, obsdate) outdir='%s/doc/' % (pid) shutil.move(outfile, outdir) except: os.remove(outfile)
def findcal(obsdate, sdbhost, sdbname, sdbuser, password):
    """Find all of the unique configuration settings used on a night and
    insert the corresponding nightly-calibration requests into the database.

    NOTE(review): this is a second definition of findcal in this module; it
    shadows the earlier one.  This version additionally requires GRTILT > 0
    for RSS frames, matches on GRTILT rather than GR_STA, and handles HRS
    ('H' files) bias calibrations.

    obsdate--Observation date in YYYYMMDD
    sdbhost--host name for sdb
    sdbname--name of the sdb
    sdbuser--user for the sdb
    password--sdb password
    """
    #connect to the db
    sdb = saltmysql.connectdb(sdbhost, sdbname, sdbuser, password)

    #get the nightinfo id
    logic = "Date='%4s-%2s-%2s'" % (obsdate[0:4], obsdate[4:6], obsdate[6:8])
    results = saltmysql.select(sdb, 'NightInfo_Id', 'NightInfo', logic)
    night_id = results[0][0]

    #select all the scam data from this obsdate
    cmd_select = 'd.FileName, d.FileData_Id, f.CCDTYPE, d.DETMODE, CCDSUM, GAINSET, ROSPEED, FILTER'
    cmd_table = ''' FileData as d left join FitsHeaderImage as f using (FileData_Id) left join FitsHeaderSalticam using (FileData_Id) '''
    cmd_logic = "d.FileName like 'S" + obsdate + "%' and f.CCDTYPE='OBJECT'"
    results = saltmysql.select(sdb, cmd_select, cmd_table, cmd_logic)
    #loop through all the results and return only the Set of identical results
    caldict = create_caldict(results)

    #insert the results into the database
    for k in caldict:
        #first check to see if it has already been entered
        record = saltmysql.select(sdb, 'FileData_Id', 'SalticamNightlyCalibration', 'FileData_Id=%i' % k)
        if len(record) < 1:
            #check for block_id
            blockid = saltmysql.select(sdb, 'Block_Id', 'FileData', 'FileData_Id=%i' % k)[0][0]
            #get the calibration types requested
            if blockid:
                # NOTE(review): 'SalitcamCalibration' and
                # 'SatlicamCalibrationType_Id' look misspelled but may match
                # the actual schema -- verify before fixing
                request = saltmysql.select(sdb, 'SalticamCalibrationType_Id', 'SalitcamCalibration', 'Block_Id=%i' % blockid)
                for cid in request:
                    cid = cid[0]
                    cmd_insert = 'NightInfo_Id=%i, FileData_Id=%i, SatlicamCalibrationType_Id=%i' % ( night_id, k, cid)
                    # insert currently disabled -- only prints what would be inserted
                    #saltmysql.insert(sdb, cmd_insert, 'SalitcamNightlyCalibration')
                    print k, " ".join([str(k) for k in caldict[k]])

    #list of rss calibration types
    #+-----------------------+----------------------------------+
    #| RssCalibrationType_Id | CalibrationType                  |
    #+-----------------------+----------------------------------+
    #|                     2 | Arc                              |
    #|                     1 | Bias                             |
    #|                    14 | Imaging flat - Lamp              |
    #|                    15 | Imaging flat - Twilight          |
    #|                     3 | Spectroscopic flat - Lamp        |
    #|                     4 | Spectroscopic flat - Twilight    |
    #|                    12 | Standard - Circular polarimetric |
    #|                    13 | Standard - Lick                  |
    #|                     9 | Standard - Linear polarimetric   |
    #|                     5 | Standard - Photometric           |
    #|                     8 | Standard - RV                    |
    #|                    11 | Standard - Smooth spectrum       |
    #|                     7 | Standard - Spectrophotometric    |
    #|                     6 | Standard - Spectroscopic         |
    #|                    10 | Standard - Unpolarised           |
    #+-----------------------+----------------------------------+

    #select all the RSS data from this obsdate
    rssheaderlist = 'f.CCDTYPE, d.DETMODE, d.OBSMODE, CCDSUM, GAINSET, ROSPEED, FILTER, GRATING, GRTILT, AR_STA, MASKID'
    cmd_select = 'd.FileName,d.FileData_Id, %s' % rssheaderlist # CCDTYPE, DETMODE, OBSMODE, CCDSUM, GAINSET, ROSPEED, FILTER, GRATING, GR_STA, AR_STA, MASKID'
    cmd_table = ''' FileData as d left join FitsHeaderImage as f using (FileData_Id) left join FitsHeaderRss using (FileData_Id) join ProposalCode using (ProposalCode_Id) '''
    cmd_logic = "d.FileName like 'P" + obsdate + "%' and CCDTYPE='OBJECT' and Proposal_Code not like 'CAL_SPST' and GRTILT > 0"
    results = saltmysql.select(sdb, cmd_select, cmd_table, cmd_logic)
    #loop through all the results and return only the Set of identical results
    caldict = create_caldict(results)

    #insert the rss results into the database
    for k in caldict:
        #first check to see if it has already been entered
        record = saltmysql.select(sdb, 'FileData_Id', 'RssNightlyCalibration', 'FileData_Id=%i' % k)
        if len(record) < 1:
            #period for checking for SPST.  If the uses requests this, it gets set to
            # 7 days, but the default is taken within the last month for non-requests
            period = 30
            #check for block_id
            blockid = saltmysql.select(sdb, 'Block_Id', 'FileData', 'FileData_Id=%i' % k)[0][0]
            #get the calibration types requested
            if blockid:
                request = saltmysql.select(sdb, 'RssCalibrationType_Id', 'RssCalibration', 'Block_Id=%i' % blockid)
                for cid in request:
                    cid = cid[0]
                    #check to see if the request is already in the database or if a similar request has
                    #been taken recently
                    if cid == 1:
                        #bias
                        if not checkforbias(sdb, k):
                            cmd_insert = 'NightInfo_Id=%i, FileData_Id=%i, RssCalibrationType_Id=%i' % ( night_id, k, cid)
                            saltmysql.insert(sdb, cmd_insert, 'RssNightlyCalibration')
                    elif cid in [3, 4]:
                        #flats
                        if not checkforflats(sdb, k, cid, rssheaderlist, instr='rss', keylist=caldict[k], period=90):
                            cmd_insert = 'NightInfo_Id=%i, FileData_Id=%i, RssCalibrationType_Id=%i' % ( night_id, k, cid)
                            saltmysql.insert(sdb, cmd_insert, 'RssNightlyCalibration')
                    elif cid == 7:
                        #specstand
                        period = 7
                        #print period, k, caldict[k]
                        if not checkforspst( sdb, k, caldict[k], rssheaderlist, period=period):
                            cmd_insert = 'NightInfo_Id=%i, FileData_Id=%i, RssCalibrationType_Id=7' % ( night_id, k)
                            saltmysql.insert(sdb, cmd_insert, 'RssNightlyCalibration')
                    else:
                        # any other requested calibration type is inserted unconditionally
                        cmd_insert = 'NightInfo_Id=%i, FileData_Id=%i, RssCalibrationType_Id=%i' % ( night_id, k, cid)
                        saltmysql.insert(sdb, cmd_insert, 'RssNightlyCalibration')
                    print k, cid, " ".join([str(w) for w in caldict[k]])

    #select all the HRS data from this obsdate
    #+-----------------------+-------------------------------+
    #| HrsCalibrationType_Id | CalibrationType               |
    #+-----------------------+-------------------------------+
    #|                     3 | Arc - Calsys                  |
    #|                     2 | Arc - Internal                |
    #|                     1 | Bias                          |
    #|                     4 | Spectroscopic flat - Lamp     |
    #|                     5 | Spectroscopic flat - Twilight |
    #|                    11 | Standard - Lick               |
    #|                     6 | Standard - Photometric        |
    #|                     9 | Standard - RV                 |
    #|                    10 | Standard - Smooth spectrum    |
    #|                     8 | Standard - Spectrophotometric |
    #|                     7 | Standard - Spectroscopic      |
    #+-----------------------+-------------------------------+
    hrsheaderlist = 'f.CCDTYPE, d.DETMODE, d.OBSMODE, CCDSUM, GAINSET, ROSPEED'
    cmd_select = 'd.FileName,d.FileData_Id, %s' % hrsheaderlist
    cmd_table = ''' FileData as d left join FitsHeaderImage as f using (FileData_Id) left join FitsHeaderHrs using (FileData_Id) join ProposalCode using (ProposalCode_Id) '''
    cmd_logic = "d.FileName like 'H" + obsdate + "%' and CCDTYPE='Science' and Proposal_Code not like 'CAL_SPST'"
    results = saltmysql.select(sdb, cmd_select, cmd_table, cmd_logic)
    #loop through all the results and return only the Set of identical results
    caldict = create_caldict(results)

    for k in caldict:
        #first check to see if it has already been entered
        record = saltmysql.select(sdb, 'FileData_Id', 'HrsNightlyCalibration', 'FileData_Id=%i' % k)
        if len(record) < 1:
            #period for checking for SPST. If the uses requests this, it gets set to
            # 7 days, but the default is taken within the last month for non-requests
            period = 30
            #check for block_id
            blockid = saltmysql.select(sdb, 'Block_Id', 'FileData', 'FileData_Id=%i' % k)[0][0]
            #get the calibration types requested
            # HRS currently only checks for a bias (HrsCalibrationType_Id=1)
            if not checkforbias(sdb, k, instr='hrs'):
                cmd_insert = 'NightInfo_Id=%i, FileData_Id=%i, HrsCalibrationType_Id=%i' % ( night_id, k, 1)
                saltmysql.insert(sdb, cmd_insert, 'HrsNightlyCalibration')
    return
def saltpipe(obsdate,pinames,archive,ftp,email,emserver,emuser,empasswd,bcc, qcpcuser,qcpcpasswd, ftpserver,ftpuser,ftppasswd,sdbhost, sdbname, sdbuser, sdbpass, elshost, elsname, elsuser, elspass, median,function,order,rej_lo,rej_hi,niter,interp, clobber, runstatus, logfile,verbose):
    """Main nightly pipeline driver.

    Validates obsdate, creates a temporary working directory named after the
    observation date, then (inside a logging context) copies the raw RSS,
    SALTICAM and HRS data for the night into the working tree, pre-processing
    the HRS frames on the way in.

    obsdate -- observation date in YYYYMMDD
    pinames -- proposal code(s) to process
    em*/ftp*/qcpc* -- email, FTP and QC-PC credentials
    sdb*/els* -- science and ELS database connection details
    median,function,order,rej_lo,rej_hi,niter,interp -- reduction parameters
    clobber, runstatus, logfile, verbose -- standard pipeline arguments
    Raises SaltError for a malformed obsdate, a pre-existing working
    directory, or inconsistent email/ftp flags.
    """
    # set up
    basedir=os.getcwd()
    propcode=pinames
    sender = emuser + '@salt.ac.za'
    recipient = sender
    emessage = ''
    emailfile = '../piemaillist/email.lis'

    # check the observation date is sensible
    if ('/' in obsdate or '20' not in obsdate or len(obsdate) != 8):
        emessage = 'Observation date does not look sensible - YYYYMMDD\n'
        raise SaltError(emessage)

    # stop if the obsdate temporary directory already exists
    obsdir='%s' % obsdate
    if os.path.exists(obsdir):
        emessage += 'The temporary working directory ' + os.getcwd() + '/'
        emessage += obsdate + ' already exists. '
        raise SaltError(emessage)

    # create a temporary working directory and move to it
    saltio.createdir(obsdir)
    saltio.changedir(obsdir)
    workpath = saltio.abspath('.')

    # test the logfile
    logfile = workpath+logfile
    logfile = saltio.logname(logfile)

    #note the starttime
    starttime = time.time()

    #start logging
    with logging(logfile,debug) as log:

        #connect to the database
        sdb=saltmysql.connectdb(sdbhost, sdbname, sdbuser, sdbpass)

        #get the nightinfo id
        nightinfoid=saltmysql.getnightinfoid(sdb, obsdate)

        #Get the list of proposal codes
        state_select='Proposal_Code'
        state_tables='Proposal join ProposalCode using (ProposalCode_Id)'
        state_logic="current=1"
        records=saltmysql.select(sdb, state_select, state_tables, state_logic)
        propids=[k[0] for k in records]

        # Calculate the current date
        currentdate=salttime.currentobsdate()

        # are the arguments defined
        saltio.argdefined('obsdate',obsdate)

        # check email and ftp arguments are consistent
        if email and not ftp:
            message = 'ERROR: SALTPIPE -- cannot send email to PI(s) unless data is transferred '
            message += 'to the FTP server; use ftp=\'yes\' email=\'yes\''
            raise SaltError(message)

        # identify a potential list of keyword edits
        keyfile = '../newheadfiles/list_newhead_' + obsdate
        if not os.path.isfile(keyfile):
            message = '\nSALTPIPE -- keyword edits ' + keyfile + ' not found locally'
            log.message(message)

        # check directories for the raw RSS data
        rssrawpath = makerawdir(obsdate, 'rss')

        # check directories for the raw SALTICAM data
        scmrawpath = makerawdir(obsdate, 'scam')

        # check raw directories for the disk.file record and find last file number
        #check rss data
        lastrssnum = checkfordata(rssrawpath, 'P', obsdate, log)
        #check scame data
        lastscmnum = checkfordata(scmrawpath, 'S', obsdate, log)
        #check for HRS Data--not filedata yet, so cannot check

        # a "last number" of 1 means no frames were found for the instrument
        if lastrssnum == 1 and lastscmnum == 1:
            message = 'SALTPIPE -- no SALTICAM or RSS data obtained on ' + obsdate
            emessage += '\n' + message + '\n'
            log.message(message)

        #copy the data to the working directory
        if lastrssnum > 1:
            message = 'Copy ' + rssrawpath + ' --> ' + workpath + 'raw/'
            log.message(message)
            saltio.copydir(rssrawpath,'rss/raw')
        if lastscmnum > 1:
            message = 'Copy ' + scmrawpath + ' --> ' + workpath + 'raw/'
            log.message(message)
            saltio.copydir(scmrawpath,'scam/raw')

        #copy and pre-process the HRS data (blue arm first, then red)
        try:
            hrsbrawpath = makerawdir(obsdate, 'hbdet')
            saltio.createdir('hrs')
            saltio.createdir('hrs/raw')
            message = 'Copy ' + hrsbrawpath + ' --> ' + workpath + 'raw/'
            log.message(message)
            salthrspreprocess(hrsbrawpath, 'hrs/raw/', clobber=True, log=log, verbose=verbose)
            lasthrbnum=len(glob.glob('hrs/raw/*fits'))
        except Exception,e:
            # best-effort: missing HRS data should not kill the whole pipeline
            log.message('Could not copy HRS data because %s' % e)
            lasthrbnum=0
        try:
            hrsrrawpath = makerawdir(obsdate, 'hrdet')
            message = 'Copy ' + hrsrrawpath + ' --> ' + workpath + 'raw/'
            log.message(message)
            salthrspreprocess(hrsrrawpath, 'hrs/raw/', clobber=True, log=log, verbose=verbose)
            # red-arm count includes the blue frames already in hrs/raw/
            lasthrsnum=max(lasthrbnum, len(glob.glob('hrs/raw/*fits')))
        except Exception,e:
            log.message('Could not copy HRS data because %s' % e)
            lasthrsnum=lasthrbnum
def saltfast(obsdate, readme, emailserver, username,password, bcc, sdbhost, sdbname,sdbuser,
             clobber,logfile,verbose):
    """Distribute raw data for fast-response proposals taken on *obsdate*.

    Queries the science database for every file taken on the night, groups
    the files by proposal code, and for each proposal flagged as requiring
    a fast response copies its raw frames into the PI's FAST ftp area and
    prepares a notification email.
    """

    # set up
    nightlog = ''

    with logging(logfile,debug) as log:

        # determine current directory
        workdir = os.getcwd()
        logfile = workdir + '/' + os.path.basename(logfile)

        #log into the database
        sdb=saltmysql.connectdb(sdbhost,sdbname,sdbuser,password)

        #query the data base for all of the data taken last night and the proposal codes
        select='FileName, Target_Name, Proposal_Code'
        table='FileData join ProposalCode using (ProposalCode_Id)'
        logic="FileName like '%"+obsdate+"%'"
        records=saltmysql.select(sdb, select, table, logic)
        if len(records)==0:
            message='No data taken on %s\n' % obsdate
            log.message(message)
            return

        #determine the list of files, targets, and propcodes
        file_list=[]
        target_list=[]
        propcode_list=[]
        propcode_dict={}  # proposal code -> list of targets observed for it
        for name,targ,pid in records:
            file_list.append(name)
            target_list.append(targ)
            propcode_list.append(pid)
            try:
                propcode_dict[pid].append(targ)
            except KeyError:
                propcode_dict[pid]=[targ]

        # check to see if any of the PI directories requires fast response
        for pid in propcode_dict.keys():
            target=set(propcode_dict[pid])
            if checkforfast(pid, target, sdb):
                #log the move
                message='Copying data from %s to the ftp site' % pid
                log.message(message, with_stdout=verbose)

                #get the username
                piptuser=saltmysql.getpiptusername(sdb, pid)

                #create the fast directory (recreated from scratch on each run)
                fastdir='/salt/ftparea/%s/FAST/' % piptuser
                if os.path.isdir(fastdir):
                    saltio.deletedir(fastdir)
                os.mkdir(fastdir)

                for i in range(len(file_list)):
                    if propcode_list[i]==pid:
                        #create the data filename
                        # NOTE(review): files whose names start with neither 'S'
                        # (SALTICAM) nor 'P' (RSS) leave `instr` unset or stale
                        # here -- confirm upstream guarantees the prefix
                        if file_list[i].startswith('S'):
                            instr='scam'
                        elif file_list[i].startswith('P'):
                            instr='rss'
                        filepath='/salt/%s/data/%s/%s/raw/%s' % (instr, obsdate[0:4], obsdate[4:8], file_list[i])
                        saltio.copy(filepath, fastdir)

                #make the temporary readme
                mailmessage = maketempreadmefast(pid, sdb, readme)

                #send the email
                subject='SALT Raw Data available for %s' % pid
                sender='*****@*****.**'   # address masked in this copy of the source
                recipient=saltmysql.getpiptemail(sdb, piptuser)
                bcc='*****@*****.**'   # address masked in this copy of the source
def saltnightinfo( obsdate, sdbhost="sdb.saao", sdbname="sdb", sdbuser="", password="", clobber=False, logfile="saltlog.log", verbose=True, ): """Update the nightinfo table from the SOMMI log """ with logging(logfile, debug) as log: # connect the database sdb = saltmysql.connectdb(sdbhost, sdbname, sdbuser, password) # get the nightinfo_id night_id = saltmysql.getnightinfoid(sdb, obsdate) print night_id # get the start and end of twilight nightstart = saltmysql.select(sdb, "EveningTwilightEnd", "NightInfo", "NightInfo_Id=%i" % night_id)[0][0] nightend = saltmysql.select(sdb, "MorningTwilightStart", "NightInfo", "NightInfo_Id=%i" % night_id)[0][0] print nightstart print nightend print (nightend - nightstart).seconds # download the SOMMI log from the night try: sommi_log = saltmysql.select(sdb, "SONightLog", "NightLogs", "NightInfo_Id=%i" % night_id)[0][0] except: raise SaltError("Unable to read SOMMI log in the database for %s" % obsdate) # set up the time for the night try: ntime = (nightend - nightstart).seconds except: raise SaltError("Unable to read night length from database for %s" % obsdate) # parse the sommi log slog = sommi_log.split("\n") stime = 0 for i in range(len(slog)): if slog[i].count("Science Time:"): stime = extractinformation(slog[i]) if slog[i].count("Engineering Time:") and not slog[i].count("Non-observing Engineering Time"): etime = extractinformation(slog[i]) if slog[i].count("Time lost to Weather:"): wtime = extractinformation(slog[i]) if slog[i].count("Time lost to Tech. 
Problems:"): ttime = extractinformation(slog[i]) if slog[i].count("Non-observing Engineering Time:"): ltime = extractinformation(slog[i]) print etime tot_time = stime + etime + wtime + ttime print night_id, ntime, stime, etime, wtime, ttime, ltime, tot_time # insert the information into the database print tot_time, ntime if abs(tot_time - ntime) < 900: message = "Updating NightInfo Table with the following Times:\n" message += "Science Time=%i\n" % stime message += "Engineeringh=%i\n" % etime message += "Time lost to Weather=%i\n" % wtime message += "Time lost to Tech. Problems=%i\n" % ttime message += "Non-observing Engineering Time=%i\n" % ltime log.message(message) insert_state = ( "ScienceTime=%i, EngineeringTime=%i, TimeLostToWeather=%i, TimeLostToProblems=%i, NonObservingEngineeringTime=%i" % (stime, etime, wtime, ttime, ltime) ) table_state = "NightInfo" logic_state = "NightInfo_Id=%i" % night_id saltmysql.update(sdb, insert_state, table_state, logic_state) else: message = "The total time for the night is not equal to the length of the night\n" message += "Night Length=%i\n--------------------\n" % ntime message += "Science Time=%i\n" % stime message += "Engineeringh=%i\n" % etime message += "Time lost to Weather=%i\n" % wtime message += "Time lost to Tech. Problems=%i\n" % ttime message += "Non-observing Engineering Time=%i\n" % ltime message += "Total time for the Night=%i\n" % tot_time log.message(message)
def saltquery(selection, logic, startdate, enddate, outfile=None, sdbhost='sdb.saao', sdbname='sdb', \ sdbuser='', password='', clobber=False, logfile='saltlog.log', verbose=True): """Query the salt database for FITS files """ with logging(logfile,debug) as log: #check the outfiles if not saltio.checkfornone(outfile): outfile=None #check that the output file can be deleted if outfile: saltio.overwrite(outfile, clobber) #open outfile if outfile: fout=saltio.openascii(outfile, 'w') #connect to the database sdb=saltmysql.connectdb(sdbhost,sdbname,sdbuser,password) #Create a list of the selection and then unpack it selection_list = saltio.argunpack('Selection', selection) selection=','.join(selection_list) #write out the header for the outfile outstr='#'+' '.join(['%s' % x for x in selection_list]) if outfile: fout.write(outstr+'\n') else: print outstr #set up the table rsstable=''' FileData left join FitsHeaderImage using (FileData_Id) inner join FitsHeaderRss using (FileData_Id) ''' #set up the table scamtable=''' FileData left join FitsHeaderImage using (FileData_Id) inner join FitsHeaderSalticam using (FileData_Id) ''' #set up the logic logic=makelogic(logic, startdate, enddate) for tab in [rsstable, scamtable]: msg=''' Mysql querying data is: SELECT %s FROM %s WHERE %s ''' % (selection, tab, logic) log.message(msg, with_stdout=verbose) record=saltmysql.select(sdb, selection, tab, logic) print record for r in record: outstr=' '.join(['%s' % x for x in r]) if outfile: fout.write(outstr+'\n') else: print outstr #close outfile if outfile: fout.close()
def saltadvance(images, outpath, obslogfile=None, gaindb=None,xtalkfile=None, geomfile=None,subover=True,trim=True,masbias=None, subbias=False, median=False, function='polynomial', order=5,rej_lo=3, rej_hi=3,niter=5,interp='linear', sdbhost='',sdbname='',sdbuser='', password='', clobber=False, cleanup=True, logfile='salt.log', verbose=True): """SALTADVANCE provides advanced data reductions for a set of data. It will sort the data, and first process the biases, flats, and then the science frames. It will record basic quality control information about each of the steps. """ plotover=False #start logging with logging(logfile,debug) as log: # Check the input images infiles = saltio.argunpack ('Input',images) infiles.sort() # create list of output files outpath=saltio.abspath(outpath) #log into the database sdb=saltmysql.connectdb(sdbhost, sdbname, sdbuser, password) #does the gain database file exist if gaindb: dblist= saltio.readgaindb(gaindb) else: dblist=[] # does crosstalk coefficient data exist if xtalkfile: xtalkfile = xtalkfile.strip() xdict = saltio.readxtalkcoeff(xtalkfile) else: xdict=None #does the mosaic file exist--raise error if no saltio.fileexists(geomfile) # Delete the obslog file if it already exists if os.path.isfile(obslogfile) and clobber: saltio.delete(obslogfile) #read in the obsveration log or create it if os.path.isfile(obslogfile): msg='The observing log already exists. 
Please either delete it or run saltclean with clobber=yes' raise SaltError(msg) else: headerDict=obslog(infiles, log) obsstruct=createobslogfits(headerDict) saltio.writefits(obsstruct, obslogfile) #create the list of bias frames and process them filename=obsstruct.data.field('FILENAME') detmode=obsstruct.data.field('DETMODE') obsmode=obsstruct.data.field('OBSMODE') ccdtype=obsstruct.data.field('CCDTYPE') propcode=obsstruct.data.field('PROPID') masktype=obsstruct.data.field('MASKTYP') #set the bias list of objects biaslist=filename[(ccdtype=='ZERO')*(propcode=='CAL_BIAS')] masterbias_dict={} for img in infiles: if os.path.basename(img) in biaslist: #open the image struct=fits.open(img) bimg=outpath+'bxgp'+os.path.basename(img) #print the message if log: message='Processing Zero frame %s' % img log.message(message, with_stdout=verbose) #process the image struct=clean(struct, createvar=True, badpixelstruct=None, mult=True, dblist=dblist, xdict=xdict, subover=subover, trim=trim, subbias=False, bstruct=None, median=median, function=function, order=order, rej_lo=rej_lo, rej_hi=rej_hi, niter=niter, plotover=plotover, log=log, verbose=verbose) #update the database updatedq(os.path.basename(img), struct, sdb) #write the file out # housekeeping keywords fname, hist=history(level=1, wrap=False, exclude=['images', 'outimages', 'outpref']) saltkey.housekeeping(struct[0],'SPREPARE', 'Images have been prepared', hist) saltkey.new('SGAIN',time.asctime(time.localtime()),'Images have been gain corrected',struct[0]) saltkey.new('SXTALK',time.asctime(time.localtime()),'Images have been xtalk corrected',struct[0]) saltkey.new('SBIAS',time.asctime(time.localtime()),'Images have been de-biased',struct[0]) # write FITS file saltio.writefits(struct,bimg, clobber=clobber) saltio.closefits(struct) #add files to the master bias list masterbias_dict=compareimages(struct, bimg, masterbias_dict, keylist=biasheader_list) #create the master bias frame for i in masterbias_dict.keys(): 
bkeys=masterbias_dict[i][0] blist=masterbias_dict[i][1:] mbiasname=outpath+createmasterbiasname(blist, bkeys) bfiles=','.join(blist) saltcombine(bfiles, mbiasname, method='median', reject='sigclip', mask=False, weight=False, blank=0, scale=None, statsec=None, lthresh=3, \ hthresh=3, clobber=False, logfile=logfile,verbose=verbose) #create the list of flatfields and process them flatlist=filename[ccdtype=='FLAT'] masterflat_dict={} for img in infiles: if os.path.basename(img) in flatlist: #open the image struct=fits.open(img) fimg=outpath+'bxgp'+os.path.basename(img) #print the message if log: message='Processing Flat frame %s' % img log.message(message, with_stdout=verbose) #process the image struct=clean(struct, createvar=True, badpixelstruct=None, mult=True, dblist=dblist, xdict=xdict, subover=subover, trim=trim, subbias=False, bstruct=None, median=median, function=function, order=order, rej_lo=rej_lo, rej_hi=rej_hi, niter=niter, plotover=plotover, log=log, verbose=verbose) #update the database updatedq(os.path.basename(img), struct, sdb) #write the file out # housekeeping keywords fname, hist=history(level=1, wrap=False, exclude=['images', 'outimages', 'outpref']) saltkey.housekeeping(struct[0],'SPREPARE', 'Images have been prepared', hist) saltkey.new('SGAIN',time.asctime(time.localtime()),'Images have been gain corrected',struct[0]) saltkey.new('SXTALK',time.asctime(time.localtime()),'Images have been xtalk corrected',struct[0]) saltkey.new('SBIAS',time.asctime(time.localtime()),'Images have been de-biased',struct[0]) # write FITS file saltio.writefits(struct,fimg, clobber=clobber) saltio.closefits(struct) #add files to the master bias list masterflat_dict=compareimages(struct, fimg, masterflat_dict, keylist=flatheader_list) #create the master flat frame for i in masterflat_dict.keys(): fkeys=masterflat_dict[i][0] flist=masterflat_dict[i][1:] mflatname=outpath+createmasterflatname(flist, fkeys) ffiles=','.join(flist) saltcombine(ffiles, mflatname, 
method='median', reject='sigclip', mask=False, weight=False, blank=0, scale=None, statsec=None, lthresh=3, \ hthresh=3, clobber=False, logfile=logfile,verbose=verbose) #process the arc data arclist=filename[(ccdtype=='ARC') * (obsmode=='SPECTROSCOPY') * (masktype=='LONGSLIT')] for i, img in enumerate(infiles): nimg=os.path.basename(img) if nimg in arclist: #open the image struct=fits.open(img) simg=outpath+'bxgp'+os.path.basename(img) obsdate=os.path.basename(img)[1:9] #print the message if log: message='Processing ARC frame %s' % img log.message(message, with_stdout=verbose) struct=clean(struct, createvar=False, badpixelstruct=None, mult=True, dblist=dblist, xdict=xdict, subover=subover, trim=trim, subbias=False, bstruct=None, median=median, function=function, order=order, rej_lo=rej_lo, rej_hi=rej_hi, niter=niter, plotover=plotover, log=log, verbose=verbose) # write FITS file saltio.writefits(struct,simg, clobber=clobber) saltio.closefits(struct) #mosaic the images mimg=outpath+'mbxgp'+os.path.basename(img) saltmosaic(images=simg, outimages=mimg,outpref='',geomfile=geomfile, interp=interp,cleanup=True,clobber=clobber,logfile=logfile, verbose=verbose) #remove the intermediate steps saltio.delete(simg) #measure the arcdata arcimage=outpath+'mbxgp'+nimg dbfile=outpath+obsdate+'_specid.db' lamp = obsstruct.data.field('LAMPID')[i] lamp = lamp.replace(' ', '') lampfile = iraf.osfn("pysalt$data/linelists/%s.salt" % lamp) print arcimage, lampfile, os.getcwd() specidentify(arcimage, lampfile, dbfile, guesstype='rss', guessfile='', automethod='Matchlines', function='legendre', order=3, rstep=100, rstart='middlerow', mdiff=20, thresh=3, startext=0, niter=5, smooth=3, inter=False, clobber=True, logfile=logfile, verbose=verbose) try: ximg = outpath+'xmbxgp'+os.path.basename(arcimage) specrectify(images=arcimage, outimages=ximg, outpref='', solfile=dbfile, caltype='line', function='legendre', order=3, inttype='interp', w1=None, w2=None, dw=None, nw=None, blank=0.0, 
conserve=True, nearest=True, clobber=True, logfile=logfile, verbose=verbose) except: pass #process the science data for i, img in enumerate(infiles): nimg=os.path.basename(img) if not (nimg in flatlist or nimg in biaslist or nimg in arclist): #open the image struct=fits.open(img) if struct[0].header['PROPID'].count('CAL_GAIN'): continue simg=outpath+'bxgp'+os.path.basename(img) #print the message if log: message='Processing science frame %s' % img log.message(message, with_stdout=verbose) #Check to see if it is RSS 2x2 and add bias subtraction instrume=saltkey.get('INSTRUME', struct[0]).strip() gainset = saltkey.get('GAINSET', struct[0]) rospeed = saltkey.get('ROSPEED', struct[0]) target = saltkey.get('OBJECT', struct[0]).strip() exptime = saltkey.get('EXPTIME', struct[0]) obsmode = saltkey.get('OBSMODE', struct[0]).strip() detmode = saltkey.get('DETMODE', struct[0]).strip() masktype = saltkey.get('MASKTYP', struct[0]).strip() xbin, ybin = saltkey.ccdbin( struct[0], img) obsdate=os.path.basename(img)[1:9] bstruct=None crtype=None thresh=5 mbox=11 bthresh=5.0, flux_ratio=0.2 bbox=25 gain=1.0 rdnoise=5.0 fthresh=5.0 bfactor=2 gbox=3 maxiter=5 subbias=False if instrume=='RSS' and gainset=='FAINT' and rospeed=='SLOW': bfile='P%sBiasNM%ix%iFASL.fits' % (obsdate, xbin, ybin) if os.path.exists(bfile): bstruct=fits.open(bfile) subbias=True if detmode=='Normal' and target!='ARC' and xbin < 5 and ybin < 5: crtype='edge' thresh=5 mbox=11 bthresh=5.0, flux_ratio=0.2 bbox=25 gain=1.0 rdnoise=5.0 fthresh=5.0 bfactor=2 gbox=3 maxiter=3 #process the image struct=clean(struct, createvar=True, badpixelstruct=None, mult=True, dblist=dblist, xdict=xdict, subover=subover, trim=trim, subbias=subbias, bstruct=bstruct, median=median, function=function, order=order, rej_lo=rej_lo, rej_hi=rej_hi, niter=niter, plotover=plotover, crtype=crtype,thresh=thresh,mbox=mbox, bbox=bbox, \ bthresh=bthresh, flux_ratio=flux_ratio, gain=gain, rdnoise=rdnoise, bfactor=bfactor, fthresh=fthresh, gbox=gbox, 
maxiter=maxiter, log=log, verbose=verbose) #update the database updatedq(os.path.basename(img), struct, sdb) #write the file out # housekeeping keywords fname, hist=history(level=1, wrap=False, exclude=['images', 'outimages', 'outpref']) saltkey.housekeeping(struct[0],'SPREPARE', 'Images have been prepared', hist) saltkey.new('SGAIN',time.asctime(time.localtime()),'Images have been gain corrected',struct[0]) saltkey.new('SXTALK',time.asctime(time.localtime()),'Images have been xtalk corrected',struct[0]) saltkey.new('SBIAS',time.asctime(time.localtime()),'Images have been de-biased',struct[0]) # write FITS file saltio.writefits(struct,simg, clobber=clobber) saltio.closefits(struct) #mosaic the files--currently not in the proper format--will update when it is if not saltkey.fastmode(saltkey.get('DETMODE', struct[0])): mimg=outpath+'mbxgp'+os.path.basename(img) saltmosaic(images=simg, outimages=mimg,outpref='',geomfile=geomfile, interp=interp,fill=True, cleanup=True,clobber=clobber,logfile=logfile, verbose=verbose) #remove the intermediate steps saltio.delete(simg) #if the file is spectroscopic mode, apply the wavelength correction if obsmode == 'SPECTROSCOPY' and masktype.strip()=='LONGSLIT': dbfile=outpath+obsdate+'_specid.db' try: ximg = outpath+'xmbxgp'+os.path.basename(img) specrectify(images=mimg, outimages=ximg, outpref='', solfile=dbfile, caltype='line', function='legendre', order=3, inttype='interp', w1=None, w2=None, dw=None, nw=None, blank=0.0, conserve=True, nearest=True, clobber=True, logfile=logfile, verbose=verbose) except Exception, e: log.message('%s' % e) #clean up the results if cleanup: #clean up the bias frames for i in masterbias_dict.keys(): blist=masterbias_dict[i][1:] for b in blist: saltio.delete(b) #clean up the flat frames for i in masterflat_dict.keys(): flist=masterflat_dict[i][1:] for f in flist: saltio.delete(f)