def __init__(self, logger, ev_quit, timeout=0.1, interval=1.0,
             fifopath='/app/OBC/dat_local/flowctrl.fifo', fitsdir=''):
    self.fifopath = fifopath
    self.fitsdir = fitsdir
    self.timeout = timeout
    # Interval to wait between DAQ submissions
    self.interval = interval
    self.logger = logger
    self.ev_quit = ev_quit
    self.queue = Queue.Queue()

    # Get an instrument configuration object
    self.insconfig = INSconfig()

    # Check FIFO access
    try:
        self.daqfifo_f = open(self.fifopath, 'w')

    except IOError, e:
        print "Can't open '%s' for writing" % self.fifopath
        sys.exit(1)
def __init__(self, logger, fitsdir, daqdir, ev_quit=threading.Event(),
             timeout=0.1, interval=1.0,
             fifopath='/app/OBC/dat_local/flowctrl.fifo'):
    self.fitsdir = fitsdir
    super(DAQSink, self).__init__(logger, self.fitsdir,
                                  notify_fn=self.archive_notify,
                                  notify_thread=True)
    self.fifopath = fifopath
    self.daqdir = daqdir
    self.timeout = timeout
    # Interval to wait between DAQ submissions
    self.interval = interval
    self.ev_quit = ev_quit
    self.queue = Queue.Queue()

    # Get an instrument configuration object
    self.insconfig = INSconfig()

    # Check FIFO access
    try:
        self.daqfifo_f = open(self.fifopath, 'w')
        #pass

    except IOError, e:
        raise DAQError("Can't open '%s' for writing" % self.fifopath)
def __init__(self, channelnum, ev_quit, logger, taskqueue,
             myhost=None, myport=None, seq_num=None):

    # Initialize common attributes
    super(STARSchannel, self).__init__(ev_quit, logger, None, taskqueue,
                                       myhost=myhost, myport=myport,
                                       seq_num=seq_num)

    self.channelnum = channelnum
    self.recvTasker = Task.make_tasker(self.cmd_recv)
    self.insconfig = INSconfig()

    # Create an RPC server
    try:
        key = ('OBCtoSTARS%d' % channelnum)
        self.rpcconn = SOSSrpc.clientServerPair(key, initiator=False,
                                                logger=self.logger,
                                                ev_quit=self.ev_quit,
                                                #myhost=self.myhost,
                                                recvqueue=taskqueue,
                                                func=self.recvTasker)
    except SOSSrpc.rpcError, e:
        raise STARSsimError("Error creating rpc client-server pair: %s" % \
                            str(e))
def __init__(self, logger, cfg):
    self.logger = logger
    self.cfg = cfg

    self.fv = ro.remoteObjectProxy('fitsview')
    self.fv1 = ro.remoteObjectProxy('fitsview1')
    self.stars = ro.remoteObjectProxy('STARS')
    self.insconfig = INSconfig()
def __init__(self):
    self.insconfig = INSconfig()
    self.obcpnum = 18
    self.obcphost = 'fmos02'
    self.ev_quit = threading.Event()
    self.ev_mainquit = threading.Event()
    self.soss = None
    self.logger = logging.getLogger('INSintTesterCLI')

    statusDict = {}
    self.statusObj = INSint.FetchStatusWrapper_SOSS(statusDict)

    self.cmdtag = 0
def __init__(self, logger, host):
    """Constructor.
    """
    self.logger = logger

    # Create SOSS DB object
    self.db = SOSSdb.SOSSdb(self.logger, host=host)

    # For looking up instruments
    self.ins_config = INSconfig()

    super(DbTool, self).__init__()
def __init__(self, options, logger):
    """Constructor for the INSintTester object.
    """
    # Invoke our base class constructor.
    INSintGUI.__init__(self)

    self.logger = logger
    self.insconfig = INSconfig()

    # Set up default connection parameters from command line
    self.obcpnum = options.obcpnum
    self.monunitnum = options.monunitnum
    if not options.obcphost:
        self.obcphost = SOSSrpc.get_myhost(short=True)
    else:
        self.obcphost = options.obcphost
    self.raidpath = options.fitsdir
    self.transfermethod = options.transfermethod
    self.username = options.username
    self.password = options.password
    self.ev_quit = threading.Event()
    self.ev_mainquit = threading.Event()
    self.soss = None

    # Did user specify a Gen2 status source?
    if options.statussvc:
        ro_status = ro.remoteObjectProxy(options.statussvc)
        statusObj = INSint.FetchStatusWrapper_Gen2(ro_status)

    # Did user specify a SOSS status source?
    elif options.statushost:
        from SOSS.status import cachedStatusObj

        statusDict = cachedStatusObj(options.statushost)
        statusObj = INSint.FetchStatusWrapper_SOSS(statusDict)

    # Did user specify a SOSS status file?
    elif options.statusfile:
        try:
            in_f = open(options.statusfile, 'r')
            statusDict = eval(in_f.read())
            in_f.close()

        except Exception, e:
            self.showStatus("Error reading status file '%s': %s" % \
                            (options.statusfile, str(e)))
            statusDict = {}

        statusObj = INSint.FetchStatusWrapper_SOSS(statusDict)
def __init__(self, prognum=OSSS_Frame, logger=None, ev_quit=None,
             frame_func=None):
    super(OSSS_FRAMEServer, self).__init__(prognum, logger=logger,
                                           ev_quit=ev_quit)
    if not frame_func:
        frame_func = self.getFrames
    self.frame_func = frame_func
    self.lock = threading.RLock()

    # used if we need to implement our own frame server
    self.counts = {}
    self.insconfig = INSconfig()
def __init__(self, svcname, logger, monitor=None, monchannels=[]):
    self.logger = logger
    self.monitor = monitor
    self.monchannels = monchannels
    # TODO: parameterize tag template (from svcname?)
    self.tag_template = 'mon.frame.%s.frameSvc'

    self.insconfig = INSconfig()

    # For updating status system with session info
    # TODO: parameterize object
    self.status = ro.remoteObjectProxy('status')

    # For mutual exclusion
    self.lock = threading.RLock()
def main(options, args):

    # Make sure there is at least 1 argument
    if len(args) == 0:
        parser.error("incorrect number of arguments")

    # Get an instrument configuration object
    insconfig = INSconfig()

    # Validate fifo command
    try:
        kind = _FIFO_KIND[options.kind.upper()]

    except KeyError, e:
        sys.stderr.write("Not a valid DAQ fifo command: '%s'" % options.kind)
        sys.exit(1)
def __init__(self, name, logger, monitor, threadPool,
             automount_propdir=False):
    self.name = name
    self.logger = logger
    self.mon = monitor

    # For looking up instrument info
    self.insconfig = INSconfig()

    # For critical sections in this object
    self.lock = threading.RLock()

    # Should we automount proposal id directories
    self.automount_propdir = automount_propdir

    # For background tasks
    self.threadPool = threadPool
    self.shares = ['logger', 'threadPool']
    self.tag = 'SessionManager'

    super(SessionManager, self).__init__()
def main(options, args):

    # Get an instrument configuration object
    insconfig = INSconfig()

    if options.check_stars:
        # Only import this if they are asking if file is in STARS
        import STARSquery

    for fitspath in args:

        # Separate leading directory
        res = fitsutils.getFrameInfoFromPath(fitspath)
        if not res:
            print "File name '%s' doesn't match a valid Subaru FITS name." % \
                  (fitspath)
            print "Please rename the file as 'XXX{A|Q}DDDDDDDD.fits'"
            print "Skipping this file..."
            continue

        (frameid, fitsfile, fitsdir, inscode, frametype, frameno) = res

        try:
            insname = insconfig.getNameByCode(inscode)

        except KeyError:
            print "File name '%s' doesn't match a valid Subaru instrument." % \
                  (fitsfile)
            print "Skipping this file..."
            continue

        if options.check_stars:
            print "Checking if frame %s is in STARS..." % frameid
            if STARSquery.GetStarsInfoForFitsID(frameid):
                print "Frame %s IS in STARS!" % frameid
                print "Skipping this file..."
                continue

        # Look up the instrument number and figure out the path where
        # the file should end up
        insnum = insconfig.getNumberByCode(inscode)
        obcInstrPath = '/mdata/fits/obcp%2d' % insnum
        obcIndexPath = '/mdata/index'
        indexfile = frameid + '.index'

        # Get some metadata by reading the file (if necessary)
        metadata = {}
        get_fits_metadata(metadata, fitspath=fitspath,
                          use_mtime=not options.use_ctime)

        # Substitute path where file should end up
        metadata['fitspath'] = obcInstrPath + '/' + fitsfile
        metadata['indexpath'] = obcIndexPath + '/' + indexfile

        indexpath = options.indexdir + '/' + indexfile

        # Make the index file
        if options.create_index:
            print "Creating index file for %s fits file '%s'..." % \
                  (insname, fitsfile)
            create_index_file(metadata, indexpath=indexpath)

        if options.copy_mdata:
            # chmod 440 fitspath--what DAQ expects
            try:
                os.chmod(fitspath, 0440)
            except OSError, e:
                print "Error chmod on '%s': %s" % (fitsfile, str(e))

            # FITS file
            dstpath = obcInstrPath + '/' + fitsfile
            sshcmd = "ssh [email protected] ls -ld %s" % dstpath
            res = os.system(sshcmd)
            if res != 512:
                print "File may already exist: %s" % dstpath
                print "Skipping file transfer..."
            else:
                scpcmd = "scp -p %s [email protected]:%s" % (fitspath, dstpath)
                print scpcmd
                res = 0
                res = os.system(scpcmd)
                if res != 0:
                    print "Error code transferring file: %d" % res

            # Index file
            dstpath = obcIndexPath + '/' + indexfile
            sshcmd = "ssh [email protected] ls -ld %s" % dstpath
            res = os.system(sshcmd)
            if res != 512:
                print "File may already exist: %s" % dstpath
                print "Skipping file transfer..."
            else:
                scpcmd = "scp -p %s [email protected]:%s" % (indexpath, dstpath)
                print scpcmd
                res = 0
                res = os.system(scpcmd)
                if res != 0:
                    print "Error code transferring file: %d" % res

        if options.insert_flowqueue:
            sshcmd = "ssh [email protected] /soss/SRC/TOOL/bin/DAQobcQueueInsert %s 10000000 -y" % frameid
            res = 0
            print sshcmd
            res = os.system(sshcmd)
            if res != 0:
                print "May have been a problem with DAQobcQueueInsert"
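# The loop above depends on fitsutils.getFrameInfoFromPath() to split a path
# into (frameid, fitsfile, fitsdir, inscode, frametype, frameno).  The sketch
# below only illustrates the 'XXX{A|Q}DDDDDDDD.fits' naming convention cited
# in the error message; parse_frame_path() and its regular expression are
# hypothetical helpers, not the actual fitsutils implementation.
import os
import re

_frame_regex = re.compile(r'^([A-Z]{3})([AQ])(\d{8})\.fits$')

def parse_frame_path(fitspath):
    # Split off the leading directory, then match the frame naming pattern
    fitsdir, fitsfile = os.path.split(fitspath)
    match = _frame_regex.match(fitsfile)
    if not match:
        return None
    inscode, frametype, numstr = match.groups()
    frameid = inscode + frametype + numstr
    return (frameid, fitsfile, fitsdir, inscode, frametype, int(numstr))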
def __init__(self):
    self.count = 1
    self.insconfig = INSconfig()
def __init__(self, logger, env, ev_quit=None):
    self.logger = logger

    # Convoluted but sure way of getting this module's directory
    self.mydir = os.path.split(sys.modules[__name__].__file__)[0]

    if not ev_quit:
        self.ev_quit = threading.Event()
    else:
        self.ev_quit = ev_quit

    # This is used to cancel long running commands
    self.ev_cancel = threading.Event()

    self.ocs = None
    self.mystatus = None
    self.mode = 'default'

    # Contains various instrument configuration
    self.insconfig = INSconfig()

    # Width and height of video image
    self.width = 320
    self.height = 240

    # Construct numpy array used to flip image horizontally
    self.flip_h = numpy.arange(self.width-1, -1, -1)

    # Various measured parameters for Fieldmonitor camera
    # (provided kindly by Takato-san)
    self.pa_offset = 3.87
    self.crpix1 = 164
    self.crpix2 = 64
    self.cdelt1 = 0.1216
    self.cdelt2 = 0.1184

    # Thread-safe bunch for storing parameters read/written
    # by threads executing in this object
    self.param = Bunch.threadSafeBunch()

    # Camera defaults (taken from original cap.c program)
    self.param.brightness = 60000
    self.param.hue = 32767
    self.param.color = 32767
    self.param.contrast = 64000
    self.param.whitebal = 32767
    # Wait time after starting capture (in usec).
    self.param.waittime = 0
    # Number of subintegrations.
    self.param.subinteg = 1
    # Total integration time
    self.param.integtime = 10.0
    # Number of images stacked
    self.param.stacksize = 7
    # Coadd factor
    self.param.coadd = 1.0
    # Pixel type
    self.param.pxtype = 'Int16'

    # Interval between images (secs)
    self.param.snap_interval = 60
    # Interval between status packets (secs)
    self.param.status_interval = 60 * 1

    # Where to store my locally generated FITS files.
    self.param.fitsdir = "/data/FLDMON/fits"
    # Where is my flat-fielding FITS file
    self.param.flatfile = "/data/FLDMON/calib/flat.fits"

    # Time to enable automatic image sending.
    # (FieldMonitor camera shutter opens at 6pm)
    self.param.autosnap_start = "18:00:00"
    # Time to disable automatic image sending
    # (FieldMonitor camera shutter closes at 6am)
    self.param.autosnap_stop = "06:00:00"

    # This controls automatic generation of FITS files
    self.ev_auto = threading.Event()
    # Uncomment this to have FieldMonitor fire up in AUTOSEND mode
    self.ev_auto.set()

    # FLDMON pulls status in sets.  These are the known sets.
    self.statusset = Bunch.Bunch()
    self.statusset.DOME = {'CXWS.TSCV.SHUTTER': 0,
                           }
    self.statusset.START_EXP = {'FITS.SBR.UT1-UTC': 0,
                                'FITS.SBR.TELESCOP': 0,
                                'FITS.SBR.MAINOBCP': 0,
                                'TSCS.EL': 0,
                                'TSCS.AZ': 0,
                                'FITS.SBR.AIRMASS': 0,
                                'FITS.SBR.ALTITUDE': 0,
                                'FITS.SBR.AZIMUTH': 0,
                                'FITS.SBR.EQUINOX': 0,
                                'FITS.SBR.RA': 0,
                                'FITS.SBR.DEC': 0,
                                'FITS.SBR.OUT_HUM': 0,
                                'FITS.SBR.OUT_TMP': 0,
                                'FITS.SBR.OUT_WND': 0,
                                'FITS.SBR.OUT_PRS': 0,
                                'TSCL.WINDD': 0,
                                'FITS.FLD.OBSERVER': 0,
                                'FITS.FLD.OBJECT': 0,
                                'FITS.FLD.PROP-ID': 0,
                                }
    self.statusset.END_EXP = {'TSCS.EL': 0,
                              'FITS.SBR.AIRMASS': 0,
                              'FITS.SBR.ALTITUDE': 0,
                              ## 'FITS.SBR.AZIMUTH': 0,
                              ## 'FITS.SBR.EQUINOX': 0,
                              ## 'FITS.SBR.RA': 0,
                              ## 'FITS.SBR.DEC': 0,
                              }
    self.statusset.PROP_TMPL = ['FITS.%3.3s.PROP-ID',
                                'FITS.%3.3s.OBJECT',
                                'FITS.%3.3s.OBSERVER',
                                ## 'FITS.%3.3s.OBS-ALOC',
                                ## 'FITS.%3.3s.FOC-POS',
                                ]
#
# Make data directories for active instruments.  Expects env var
# DATAHOME to be set.
#
import sys, os

from cfg.INS import INSdata as INSconfig

ic = INSconfig()

for name in ic.getNames(active=True):
    dirpath = os.path.join(os.environ['DATAHOME'], name)
    if not os.path.isdir(dirpath):
        print "Making %s..." % dirpath
        os.mkdir(dirpath)
def __init__(self, logger=None, ev_quit=None, dbpath='taskmgr-db',
             threadPool=None, numthreads=50, internal_allocs=None,
             identity=serviceName, monitor='monitor'):

    self.myname = identity
    self.myMonName = ('%s.mon' % self.myname)
    self.mainMonName = monitor

    # For instrument information
    self.insconfig = INSconfig()

    # Handles for subsystems.  Is set/reset by setAllocs() method.
    self.alloc = Bunch.threadSafeBunch()
    #self.alloc = {}

    # Dictionary-compat object of 'internal' allocations (local objects to
    # the TaskManager process)
    if internal_allocs:
        self.internal_allocs = internal_allocs
    else:
        self.internal_allocs = {}
    self.internal_allocs['taskmgr'] = self

    # If no logger is passed in, create a simple one to stderr
    if not logger:
        logger = logging.getLogger(self.myname)
        logger.setLevel(logging.ERROR)
        fmt = logging.Formatter(ssdlog.STD_FORMAT)
        stderrHdlr = logging.StreamHandler()
        stderrHdlr.setFormatter(fmt)
        logger.addHandler(stderrHdlr)
    self.logger = logger

    if not ev_quit:
        ev_quit = threading.Event()
    self.ev_quit = ev_quit

    self.numthreads = numthreads

    # If we were passed in a thread pool, then use it.  If not,
    # make one.  Record whether we made our own or not.
    if threadPool != None:
        self.threadPool = threadPool
        self.mythreadpool = False
    else:
        self.threadPool = Task.ThreadPool(logger=self.logger,
                                          ev_quit=self.ev_quit,
                                          numthreads=self.numthreads)
        self.mythreadpool = True

    # If we were handed a monitor then use it, otherwise create a
    # minimon (will be synced to by external monitor)
    if self.internal_allocs.has_key('monitor'):
        self.monitor = self.internal_allocs['monitor']
        self.mymonitor = False
    else:
        self.monitor = Monitor.Minimon(self.myMonName, self.logger,
                                       useSync=False,
                                       threadPool=self.threadPool)
        self.mymonitor = True
        self.internal_allocs['monitor'] = self.monitor

    # Our list of loaded modules
    self.modules = Bunch.threadSafeBunch()

    # Our directory mapping subsystems and task class names
    self.taskdir = Bunch.threadSafeBunch(caseless=True)
    self.taskdir[''] = Bunch.threadSafeBunch(caseless=True)

    # Generates tags for tasks
    self.tagger = TagGenerator()

    # Used for validating subsystem commands
    self.validator = ParaValidator.ParaValidator(self.logger)

    # For loading skeleton files
    self.sk_bank = sk_interp.skBank(obshome)

    # TODO: this value should be parameterized
    self.channels = [self.myname]

    # Default variables we will share with child tasks
    self.shares = ['logger', 'threadPool', 'alloc', 'shares', 'tagger',
                   'monitor', 'validator', 'channels', 'insconfig',]

    # Top level tag that all my tasks will appear underneath in the monitor
    self.tag = ('mon.tasks.%s' % self.myname)

    self.qtask = {}
    self.topTags = []
def __init__(self, logger, ev_quit=None, timeout=0.1,
             ocsint=None, frameint=None, statusObj=None,
             allowNoPara=False, env=None, obcpnum=9,
             threadPool=None, numthreads=20):

    self.lock = threading.RLock()
    if not ev_quit:
        self.ev_quit = threading.Event()
    else:
        self.ev_quit = ev_quit
    self.logger = logger
    self.numthreads = numthreads
    self.timeout = timeout
    self.allowNoPara = allowNoPara

    # For reading various instrument configuration values
    self.insconfig = INSconfig()
    self.obcpnum = obcpnum

    # Holds various client-settable parameters
    # Install reasonable defaults for timeouts:
    #   o 15.0 sec for a status request
    #   o 240.0 sec for a file transfer request
    self.params = threadSafeBunch(timeout_status=15.0,
                                  timeout_thrucmd=None,
                                  timeout_filexfr=240.0)

    # Thread pool for autonomous tasks
    if threadPool:
        self.threadPool = threadPool
        self.own_threadPool = False
    else:
        self.threadPool = Task.ThreadPool(logger=self.logger,
                                          ev_quit=self.ev_quit,
                                          numthreads=self.numthreads)
        self.own_threadPool = True

    # For task inheritance:
    self.tag = 'Instrument'
    self.shares = ['logger', 'threadPool', 'params']

    # For our status values
    self._mystatus = {}

    # For OCS status values
    # This could also use a SOSS.status object
    #self._ocsstatus = threadSafeBunch()

    # Set up linkage between ourself and frame interface (SOSS.frame)
    self.frameint = frameint

    # Set up linkage between ourself and status interface (SOSS.status)
    self.statusObj = statusObj

    # Create the "environment" (see loadPersonality())
    if env:
        self.env = env
    else:
        self.env = Bunch(INST_PATH='.')

    # Set up linkage between ourself and OCS interface (SOSS.DAQtk)
    self.ocsint = ocsint
    if self.ocsint:
        self.ocsint.initialize(self)

    # Holds info about loaded camera modules
    self.cams = threadSafeBunch()

    # Holds para file definitions and parameter validators for
    # commands
    self.validator = ParaValidator.ParaValidator(self.logger)
def __init__(self, logger, env, ev_quit=None):
    self.logger = logger
    self.env = env

    # Convoluted but sure way of getting this module's directory
    self.mydir = os.path.split(sys.modules[__name__].__file__)[0]

    # Set ENV vars necessary for sub-processes to inherit
    os.environ['QDASVGWHOME'] = ('%s/src.c' % self.mydir)

    if not ev_quit:
        self.ev_quit = threading.Event()
    else:
        self.ev_quit = ev_quit

    # This is used to cancel long running commands
    self.ev_cancel = threading.Event()

    self.ocs = None
    self.mystatus = None
    self.mode = 'default'

    # dictionary of problem FITS files
    self.problem_files = {}

    # Contains various instrument configuration
    self.insconfig = INSconfig()

    # Thread-safe bunch for storing parameters read/written
    # by threads executing in this object
    self.param = Bunch.threadSafeBunch()

    # Interval between images (secs)
    #self.param.snap_interval = 60 * 1
    self.param.snap_interval = 1
    # Interval between status packets (secs)
    self.param.status_interval = 60 * 1

    # Skymonitor configuration parameters
    self.param.incoming_dir = "/data/SKYMON/Incoming"
    self.param.process1_dir = "/data/SKYMON/Process1"
    self.param.process2_dir = "/data/SKYMON/Process2"
    self.param.outgoing_dir = "/data/SKYMON/Outgoing"
    self.param.exptime = 3.33

    # Time to enable automatic image sending.
    # (Skymonitor camera shutter opens at 6pm)
    #self.param.autosnap_start = "18:00:00"
    self.param.autosnap_start = "17:00:00"
    # Time to disable automatic image sending
    # (Skymonitor camera shutter closes at 6am)
    self.param.autosnap_stop = "06:00:00"
    #self.param.autosnap_stop = "18:00:00"

    # This controls automatic generation of FITS files
    self.ev_auto = threading.Event()
    # Uncomment this to have Skymonitor fire up in AUTOSEND mode
    self.ev_auto.set()

    # SKYMON pulls status in sets.  These are the known sets.
    self.statusset = Bunch.Bunch()
    self.statusset.DOME = {
        'CXWS.TSCV.SHUTTER': 0,
        }
    self.statusset.START_EXP = {
        'FITS.SBR.UT1-UTC': 0,
        'FITS.SBR.TELESCOP': 0,
        'FITS.SBR.MAINOBCP': 0,
        'CXWS.TSCV.SHUTTER': 0,
        'TSCS.AZ': 0,
        'FITS.SBR.OUT_HUM': 0,
        'FITS.SBR.OUT_TMP': 0,
        'FITS.SBR.OUT_WND': 0,
        'FITS.SBR.OUT_PRS': 0,
        'FITS.SKY.OBSERVER': 0,
        'FITS.SKY.OBJECT': 0,
        'FITS.SKY.PROP-ID': 0,
        }
    self.statusset.PROP_TMPL = [
        'FITS.%3.3s.PROP-ID',
        'FITS.%3.3s.OBJECT',
        'FITS.%3.3s.OBSERVER',
        ]
def __init__(self, key, salt=None, iv=None):
    # For looking up instruments
    self.insconfig = INSconfig()

    self._eo = symenc.SymmetricEncryptionObject(key, salt=salt, iv=iv)
    self.tbl = {}
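# A minimal usage sketch of the INSconfig object created throughout this
# listing, assuming only the calls that actually appear above:
# getNames(active=True), getNameByCode() and getNumberByCode().  The 'SUP'
# instrument code used here is an assumption for illustration.
from cfg.INS import INSdata as INSconfig

insconfig = INSconfig()

# Enumerate active instrument names (as in the data-directory script above)
for name in insconfig.getNames(active=True):
    print name

# Map a 3-letter instrument code to its name and OBCP number
# (as in the frame validation code above)
inscode = 'SUP'
try:
    print insconfig.getNameByCode(inscode)
    print insconfig.getNumberByCode(inscode)
except KeyError:
    print "Not a valid Subaru instrument code: '%s'" % inscode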