def __init__(self, logger):
    """Set up command/para parsing machinery and the thread-safe registries."""
    self.logger = logger
    self.parser = CommandParser.CommandParser()

    # Scanner/parser pair used for PARA definitions
    self.para_lexer = para_lexer.paraScanner(logger=logger, debug=0)
    self.para_parser = paraParser(self.para_lexer, logger=logger, debug=0)

    # Thread-safe maps: parsed para defs plus the three register namespaces
    self.para = Bunch.threadSafeBunch()
    self.userRegMap = Bunch.threadSafeBunch()
    self.systemRegMap = Bunch.threadSafeBunch()
    self.commandRegMap = Bunch.threadSafeBunch()
def p_error(arg):
    """PLY parser error callback: tally the error and record its details
    in the module-global `errors`/`errinfo` bookkeeping."""
    global errors, errinfo
    errors += 1
    if isinstance(arg, LexToken):
        lineno = arg.lineno
        errstr = "Parse error at line %d, token %s ('%s')" % (
            lineno, arg.type, str(arg.value))
    else:
        # not a token -- report whatever we were handed
        lineno = 0
        errstr = "Parse error: %s" % str(arg)
    errinfo.append(Bunch.Bunch(lineno=lineno, errstr=errstr, token=arg))
def t_error(self, t):
    """PLY lexer error callback: record the offending character and count it."""
    errstr = "Scan error at line %d, character ('%s')" % (t.lineno,
                                                          t.value[0])
    self.errinfo.append(Bunch.Bunch(lineno=t.lineno, errstr=errstr,
                                    token=t))
    self.errors += 1
def __init__(self, sk_basedir, logger=None):
    """Set up the skeleton-file bank.

    sk_basedir  base directory of sk files
    logger      optional logger; a default 'sk.skbank' logger is used
                if none is supplied
    """
    if logger:
        self.logger = logger
    else:
        self.logger = logging.getLogger('sk.skbank')

    # Base directory of sk files
    self.sk_basedir = sk_basedir

    self.cache = Bunch.threadSafeBunch()

    # Build the three parsers.  Each gets its own scanner and private
    # PLY table modules so their generated tables don't clobber each
    # other.
    self.param_parser = self._make_parser(sk_parser.paramParser,
                                          'scan1_tab', 'param_parser_tab')
    self.ope_parser = self._make_parser(sk_parser.opeParser,
                                        'scan2_tab', 'ope_parser_tab')
    self.sk_parser = self._make_parser(sk_parser.skParser,
                                       'scan3_tab', 'sk_parser_tab')

def _make_parser(self, parser_class, lextab, tabmodule):
    """Create, build and reset one parser of *parser_class*, backed by a
    fresh scanner using *lextab*/*tabmodule* for its PLY tables."""
    lexer = sk_lexer.skScanner(logger=self.logger, debug=0, lextab=lextab)
    parser = parser_class(lexer, logger=self.logger)
    parser.build(debug=0, tabmodule=tabmodule)
    parser.reset()
    return parser
def __init__(self, logger, env, ev_quit=None):
    """Initialize the SCEXAO instrument interface object."""
    super(SCEXAO, self).__init__()

    self.logger = logger
    self.env = env
    self.ev_quit = ev_quit
    self.insname = 'SCEXAO'

    # Link to OCS delegate object (set later)
    self.ocs = None

    # Thread-safe bunch for parameters shared between threads
    # executing in this object
    self.param = Bunch.threadSafeBunch()
    # Interval between status packets (secs)
    self.param.status_interval = 60.0

    # status aliases we are interested in
    self.s_aliases = [
        'FITS.SBR.RA',
        'FITS.SBR.DEC',
        'FITS.SBR.EQUINOX',
        'FITS.SBR.HA',
        'FITS.SBR.AIRMASS',
        'TSCS.AZ',
        'TSCS.EL',
        # add more here if you like
    ]
def addvarrefs(lineno, line):
    """Scan *line* for $VAR references, recording each one in the
    enclosing ref/bad collections (closure state).

    lineno  1-based line number of *line* in the buffer
    line    the raw text to scan
    """
    offset = 0
    match = regex_varref.match(line)
    while match:
        pfx, varref, sfx = match.groups()
        # track the character span of this reference within the line
        offset += len(pfx)
        start = offset
        offset += len(varref)
        end = offset
        # strip the leading '$' and normalize to upper case
        varref = varref.upper()[1:]
        refset.add(varref)
        bnch = Bunch.Bunch(varref=varref, lineno=lineno, text=line,
                           start=start, end=end)
        reflist.append(bnch)
        try:
            # probe only -- the value itself is unused (removed the
            # dead `res =` binding); a miss marks the reference as bad
            varDict[varref]
        except KeyError:
            badset.add(varref)
            badlist.append(bnch)
        # continue scanning in the remainder of the line
        match = regex_varref.match(sfx)
def __init__(self, logger, env, ev_quit=None):
    """Initialize the PFS instrument interface object."""
    super(PFS, self).__init__()

    self.logger = logger
    self.env = env
    # Convoluted but sure way of getting this module's directory
    self.mydir = os.path.split(sys.modules[__name__].__file__)[0]

    # use the caller's quit event if one was provided
    self.ev_quit = ev_quit if ev_quit else threading.Event()

    # Link to OCS delegate object (set later)
    self.ocs = None

    # We define our own modes that we report through status to the OCS
    self.mode = 'default'

    # Thread-safe bunch for parameters shared between threads
    # executing in this object
    self.param = Bunch.threadSafeBunch()
    # Interval between status packets (secs)
    self.param.status_interval = 10.0

    self.frameType = 'A'
def parse_opebuf(buf):
    """Parse an OPE command buffer.

    Returns a Bunch with the command AST, error count/info, and the
    default params/patterns dicts (currently always empty, since the
    whole buffer is treated as the command section).

    Raises opeParseError if the default-parameter section is malformed.
    """
    #(hdrbuf, prmbuf, cmdbuf, startline) = sk_lexer.get_skparts(buf)
    cmdbuf = buf
    prmbuf = ""
    startline = 1

    (errors, ast, errinfo) = parse(cmdbuf, startline=startline)
    if errors > 0:
        # attach human-readable context to each error
        for errbnch in errinfo:
            errbnch.verbose = sk_lexer.mk_error(cmdbuf, errbnch, 10)

    # Make list of default params
    params = {}
    patterns = {}
    for line in prmbuf.split('\n'):
        line = line.strip()
        if '=' in line:
            try:
                # split on the FIRST '=' only, so values may themselves
                # contain '=' (consistent with get_vars)
                (var, val) = line.split('=', 1)
                var = var.strip().upper()
                val = val.strip()
                if not var.startswith('*'):
                    params[var] = val
                else:
                    # '*' prefix marks a pattern: comma-separated values
                    patterns[var] = val.split(',')
            except Exception as e:
                # narrowed from a bare `except:`; chain the cause
                raise opeParseError("Default parameter section does not match expected format") from e

    res = Bunch.Bunch(ast=ast, errors=errors, errinfo=errinfo,
                      params=params, patterns=patterns)
    return res
def __init__(self, logger, env, ev_quit=None):
    """Initialize the SIMCAM instrument interface object."""
    super(SIMCAM, self).__init__()

    self.logger = logger
    self.env = env
    # Convoluted but sure way of getting this module's directory
    self.mydir = os.path.split(sys.modules[__name__].__file__)[0]

    # use the caller's quit event if one was provided
    self.ev_quit = ev_quit if ev_quit else threading.Event()

    # Link to OCS delegate object (set later)
    self.ocs = None

    # We define our own modes that we report through status to the OCS
    self.mode = 'default'

    # Thread-safe bunch for parameters shared between threads
    # executing in this object
    self.param = Bunch.threadSafeBunch()
    # Interval between status packets (secs)
    self.param.status_interval = 10.0
def parse(self, buf, startline=1):
    """Parse *buf*, returning (error_count, ast, error_info_list).

    Never raises: any exception from the underlying parser is folded
    into an error AST node and the error bookkeeping.
    """
    # Reset per-parse error counters/info
    self.reset(lineno=startline)

    try:
        ast = self.parser.parse(buf, lexer=self.lexer)

    except Exception as e:
        # capture traceback?  Yacc tracebacks aren't that useful
        errstr = 'ERROR: %s' % (str(e))
        ast = ASTNode(errstr)
        # make sure the failure is reflected in the error count
        if self.errors == 0:
            self.errors += 1
            self.errinfo.append(
                Bunch.Bunch(lineno=self.lexer.lexer.lineno,
                            errstr=errstr, token=None))
        self.logger.error(errstr)

    return (self.errors, ast, self.errinfo)
def _get_params(self, ast_params):
    """Unpack a 'param_list' AST into a Bunch of parameter info.

    Returns a Bunch with paramList/paramDict (all parameters; status
    aliases get a value of None), statusDict (aliases to fetch later)
    and aliasList ((varname, alias) pairs).
    """
    assert ast_params.tag == 'param_list', \
        SkCompileError("Malformed parameter AST: %s" % str(ast_params))

    res = Bunch.Bunch(paramList=[], paramDict={},
                      statusDict={}, aliasList=[])

    for ast_kvp in ast_params.items:
        assert (ast_kvp.tag == 'key_value_pair') and (
            len(ast_kvp.items) == 2), \
            SkCompileError("Malformed key value pair AST: %s" % (
                str(ast_kvp)))

        (varname, val_ast) = ast_kvp.items
        # If this item is a status alias, add it to the dict of status
        # values that will need to be fetched
        if self.is_aliasref(val_ast):
            statusAlias = val_ast.items[0]
            res.statusDict[statusAlias] = '##NODATA##'
            res.aliasList.append((varname, statusAlias))
            value = None        # real value comes from the status fetch
        else:
            value = self.get_value(val_ast)

        res.paramList.append(varname)
        res.paramDict[varname] = value

    return res
def get_vars(plist, include_dirs):
    """Build substitution dictionary from the <Parameter_List> section
    of an OPE file."""
    lines = plist.split('\n')
    substDict = Bunch.caselessDict()

    while lines:
        line = lines.pop(0).strip()

        match = load_regex.match(line)
        if match:
            # inline include: splice the referenced file's lines in
            prepend_prm(lines, match.group(1), include_dirs)
            continue

        # convert to uc
        line = toupper(line)

        # skip comments, patterns and blank lines
        if line.startswith(('#', '*')) or not line:
            continue

        if '=' in line:
            # split at the first '='; values may contain '=' themselves
            var, _, val = line.partition('=')
            substDict[var.strip()] = val.strip()

    return substDict
def __init__(self, root, logger, rohost, hostname):
    """Set up the menu GUI state.

    root      toplevel widget
    logger    logger for diagnostics
    rohost    remote objects host
    hostname  local host name, used to derive the service name
    """
    # holds widgets of interest
    self.w = Bunch.Bunch()
    self.w.root = root
    self.logger = logger
    self.process = Process(logger)
    self.hostname = hostname
    self.rohost = rohost
    self.propid = None
    self.propfile = None
    self.__init_propid_entry()
    self.svcname = get_svcname(self.hostname, self.propid, self.logger)
    #self.__set_propfile()

    #self.title_suffix = "\u3041\u306A \u3081\u306C"  # あな めぬ
    # Fixed: the old encode("utf-8").decode() round trip was an
    # identity operation on a str -- assign the literal directly.
    self.title_suffix = "あなめにゅー"

    self.action_list = [
        ('FOCAS', self.launch_focas),
        ('IRCS', self.launch_ircs),
        ('HDS', self.launch_hds),
        ('MOIRCS', self.launch_moircs),
    ]
def parse_buf(self, buf, name=''):
    """Parse a para buffer; return a Bunch of param defs, the union of
    status aliases they reference, and any parse errors."""
    (paramList, paramDict) = self.parse(buf)

    # union together all the possible status aliases that could be
    # used in this para
    aliases = set()
    for pdef in paramDict.values():
        aliases.update(pdef.aliases)

    return Bunch.Bunch(name=name, paramList=paramList,
                       paramDict=Bunch.caselessDict(paramDict),
                       paramAliases=aliases,
                       errors=self.errors, errinfo=self.errinfo)
def scan_buf(self, buf):
    """Tokenize *buf*; return tokens plus any scan errors in a Bunch."""
    # tokenize first so the error counters reflect this buffer
    tokens = self.tokenize(buf)
    return Bunch.Bunch(tokens=tokens, errors=self.errors,
                       errinfo=self.errinfo)
def parse_skbuf(self, buf):
    """Parse a complete skeleton buffer (header, default params and
    command part) into a result Bunch carrying the combined AST and
    all accumulated errors."""
    # Get the constituent parts of a skeleton file:
    # header, parameter list, command part
    (hdrbuf, prmbuf, cmdbuf, startline) = sk_common.get_skparts(buf)

    # Get the header params.  Parsing errors here are non-fatal: the
    # header is not really used for anything important.
    try:
        header, _2, _3 = collect_params(hdrbuf)
    except Exception:
        header = {}

    # Make a buffer of the default params in an easily parsable form
    params, param_lst, patterns = collect_params(prmbuf)
    parambuf = ' '.join(param_lst)

    # Parse default params into an ast.
    (errors, ast_params, errinfo) = self.parse_params(parambuf)

    # This will hold the results
    res = Bunch.Bunch(errors=errors, errinfo=errinfo, header=header)

    # make readable errors
    if errors > 0:
        for errbnch in errinfo:
            errbnch.verbose = sk_common.mk_error(parambuf, errbnch, 1)

    # parse the command part and fold its errors into the result
    (errors, ast_cmds, errinfo) = self.parse(cmdbuf, startline=startline)
    res.errors += errors
    res.errinfo.extend(errinfo)

    # make readable errors
    for errbnch in errinfo:
        errbnch.verbose = sk_common.mk_error(cmdbuf, errbnch, 10)

    res.params = params
    res.patterns = patterns

    # Finally, glue the params AST and the commands AST together to
    # make a "skeleton" node
    res.ast = ASTNode("skeleton", ast_params, ast_cmds)

    # return a bundle of these objects
    return res
def __init__(self, name, logger, ev_quit=None, threadPool=None,
             numthreads=15, outlimit=5):
    """
    Constructor for the PubSubBase class.
        name        pubsub name
        logger      logger to be used for any diagnostic messages
        threadPool  optional, threadPool for serving PubSub activities
        numthreads  if a threadPool is NOT furnished, the number of
                      threads to allocate
        outlimit    max number of concurrent outbound deliveries
    """
    super(PubSubBase, self).__init__()

    self.logger = logger
    self.name = name
    self.numthreads = numthreads
    self.outlimit = outlimit

    # Handles to subscriber remote proxies
    self._partner = {}
    # Defines aggregate channels
    self.aggregates = Bunch.threadSafeBunch()

    # Termination event
    if not ev_quit:
        ev_quit = threading.Event()
    self.ev_quit = ev_quit

    # If we were passed in a thread pool, then use it.  If not,
    # make one.  Record whether we made our own or not.
    # (fixed: identity comparison with None, not `!=`)
    if threadPool is not None:
        self.threadPool = threadPool
        self.mythreadpool = False
    else:
        self.threadPool = Task.ThreadPool(logger=self.logger,
                                          ev_quit=self.ev_quit,
                                          numthreads=self.numthreads)
        self.mythreadpool = True

    # For task inheritance:
    self.tag = 'PubSub'
    self.shares = ['logger', 'threadPool']

    # For handling subscriber info
    self._lock = threading.RLock()
    self._sub_info = {}

    # number of seconds to wait before unsubscribing a subscriber
    # who is unresponsive
    self.failure_limit = 60.0

    self.cb_subscr_cnt = 0
    self.outbound_sem = threading.BoundedSemaphore(self.outlimit)
def scan_skbuf(self, buf):
    """Tokenize the command section of a skeleton buffer; return the
    tokens and any scan errors in a Bunch."""
    (hdrbuf, prmbuf, cmdbuf, startline) = sk_common.get_skparts(buf)

    (errors, tokens, errinfo) = self.tokenize(cmdbuf, startline=startline)

    # attach human-readable context to each error
    if errors > 0:
        for errbnch in errinfo:
            errbnch.verbose = sk_common.mk_error(cmdbuf, errbnch, 10)

    return Bunch.Bunch(tokens=tokens, errors=errors, errinfo=errinfo)
def p_error(self, arg):
    """PLY parser error hook: record the error, log it, and attempt
    recovery (errok for token errors, restart otherwise)."""
    self.errors += 1
    if isinstance(arg, LexToken):
        lineno = arg.lineno
        errstr = "Parse error at line %d, token %s ('%s')" % (
            lineno, arg.type, str(arg.value))
        recover = self.parser.errok
    else:
        lineno = 0
        errstr = "Parse error: %s" % str(arg)
        recover = self.parser.restart

    self.errinfo.append(Bunch.Bunch(lineno=lineno, errstr=errstr,
                                    token=arg))
    self.logger.error(errstr)
    # ? Try to recover to some sensible state
    recover()
def loadParaBuf(self, parakey, paraFileBuf):
    """Load a para definition from a buffer.
    """
    # Parse the buffer into paramDefs and build a validator over them.
    # (see other modules in this directory for details)
    bnch = self.para_parser.parse_buf(paraFileBuf, name=str(parakey))
    validator = ParameterHandler(bnch, logger=self.logger)

    # Store paramdefs and validator under the passed in parakey
    # (e.g. parakey might be ("TSC", "AG_PARTS") )
    self.para[parakey] = Bunch.Bunch(paramDefs=bnch.paramDict,
                                     paramList=bnch.paramList,
                                     paramAliases=bnch.paramAliases,
                                     validator=validator)
def _decode_params(self, ast, eval):
    """Decode a 'param_list' AST into a caseless Bunch mapping each
    variable name to its decoded value expression.

    ast   AST with tag 'param_list' whose items are key_value_pairs
    eval  evaluator passed through to self.decode()
    """
    assert ast.tag == 'param_list', ASTerr(ast)

    res = Bunch.Bunch(caseless=True)

    # Iterate over subitems, each of which should be a var=exp pair.
    # Decode all value expressions and assign results to variables.
    for keyval in ast.items:
        assert keyval.tag == 'key_value_pair', ASTerr(keyval)
        (var, val_ast) = keyval.items
        # fixed: isinstance() instead of exact type comparison
        assert isinstance(var, strtype), "variable is not a string"

        res[var] = self.decode(val_ast, eval)

    return res
def parse_opebuf(self, opebuf):
    """Parse the command section of an OPE buffer; return a Bunch with
    the error count and error info."""
    # Split the OPE file into header, parameter list and command part
    (hdrbuf, prmbuf, cmdbuf, startline) = sk_common.get_opeparts(opebuf)

    (errors, ast_params, errinfo) = self.parse_opecmd(cmdbuf,
                                                      startline=startline)

    # This will hold the results
    res = Bunch.Bunch(errors=errors, errinfo=errinfo)

    # attach human-readable context to each error
    if errors > 0:
        for errbnch in errinfo:
            errbnch.verbose = sk_common.mk_error(cmdbuf, errbnch, 1)

    return res
def _optomize_exp(self, astlist, info=None, indent=4):
    """Compile a list of expression ASTs, then emit one bulk status
    fetch for all status aliases they referenced.

    astlist  expression ASTs to compile
    info     optional shared compile-info bunch; created if None
    indent   number of spaces to indent emitted code
    """
    s_indent = ' ' * indent
    # fixed: identity comparison with None, not `==`
    if info is None:
        info = Bunch.Bunch(aliasList=set())

    for ast_exp in astlist:
        self._skcompile_exp(ast_exp, info)

    # If there are status aliases in the expression, get them all in
    # one go now
    if len(info.aliasList) > 0:
        aliasDict = dict.fromkeys(info.aliasList, None)
        self.buf.write("%s_stat = self.fetch(%s)\n" % (s_indent,
                                                       str(aliasDict)))
        # map each alias to the expression that reads it from _stat
        srcList = ["_stat['%s']" % alias for alias in info.aliasList]
        info.aliasMap = dict(zip(info.aliasList, srcList))

    return info
def skcompile_exp(self, ast):
    """Compile a single expression AST with a fresh info bunch."""
    self.logger.debug("skcompile_exp: ast=%s" % str(ast))
    return self._skcompile_exp(ast, Bunch.Bunch())
def store_userReg(self, parakey, params):
    """Register user parameters under *parakey*, with caseless keys.

    Any existing entry for *parakey* is replaced.
    """
    # (removed stray semicolons; behavior unchanged)
    self.userRegMap[parakey] = Bunch.caselessDict({})
    self.userRegMap[parakey].update(params)
def cleanup(options, args, logger):
    """Runs a cleanup on the directory specified in options.fitsdir.

    Stops when disk usage drops below the low water mark threshold
    specified by options.lowater
    """
    files = recursive_glob(options.fitsdir, "*.fits")

    # First pass. Record information about files in FITS dir.
    logger.info("Cleanup PASS 1: information gathering.")
    fitslist = []
    cur_time = time.time()

    for fitspath in files:
        logger.debug("Examining file '%s'" % fitspath)

        # If this is not a .fits file then move on
        # (simplified: direct case-insensitive compare instead of regex)
        ext = os.path.splitext(fitspath)[1]
        if ext.lower() != '.fits':
            logger.info("No FITS extension: '%s'" % fitspath)
            continue

        # Record modification time of file
        try:
            age = os.stat(fitspath).st_mtime
        except OSError as e:
            logger.error("Error stat(%s): %s" % (fitspath, str(e)))
            continue

        # Skip files that don't look like Subaru frames
        # (result of the lookup is unused; only the exception matters)
        try:
            getFrameInfoFromPath(fitspath)
        except Exception as e:
            logger.info("Not a Subaru FITS frame: '%s': %s" % (
                fitspath, str(e)))
            continue

        # Skip files that are younger than the minimum required age
        delta = cur_time - age
        if delta <= keep_threshold_age:
            filedate = time.strftime("%Y-%m-%d %H:%M:%S",
                                     time.localtime(age))
            logger.info("Skipping too young file: (%s) '%s'" % (
                filedate, fitspath))
            continue

        fitslist.append(Bunch.Bunch(fitspath=fitspath, age=age))

    # Sort by age, oldest first
    fitslist.sort(key=lambda x: x.age)

    delete(options, logger, fitslist)
def __init__(self, params):
    """Wrap *params* in a caseless map and tag this instance with a
    unique, monotonically increasing id from the module counter."""
    global count
    self.variable_map = Bunch.caselessDict(params)
    # grab the current counter value, then bump it for the next instance
    self.count = count
    count += 1
def __init__(self):
    """Start the rib stack with a single empty caseless rib."""
    self.special_keys = ['SYSTEM', 'USER', 'COMMAND', 'STATUS']
    self.ribs = [Bunch.caselessDict()]
def store_commandReg(self, parakey, params):
    """Register command parameters under *parakey*, with caseless keys.

    Any existing entry for *parakey* is replaced.
    """
    # (removed stray semicolon; behavior unchanged)
    self.commandRegMap[parakey] = Bunch.caselessDict({})
    self.commandRegMap[parakey].update(params)
def check_ope(buf, include_dirs=None):
    """
    Parse an OPE file and return a Bunch of information about it.

    Returns a bunch with several items defined:
      reflist: a list of all variable references (each is a bunch)
      refset: a set of all variable references (just variable names)
      badlist: a list of all undefined variable references (each is a bunch)
      badset: a set of all undefined variable references (just variable names)
      taglist: a list of all tag lines encountered (each is a bunch)
      vardict: the variable substitution dictionary
      badcoords: lines whose coordinates failed check_coords()
    """
    if include_dirs == None:
        include_dirs = []

    # compute the variable dictionary
    varDict = get_vars_ope(buf, include_dirs)

    refset = set([])
    badset = set([])
    reflist = []
    badlist = []
    taglist = []
    badcoords = []

    def addvarrefs(lineno, line):
        # Scan one line for $VAR references, recording each one in the
        # enclosing collections; references missing from varDict are
        # also recorded as "bad".
        offset = 0
        match = regex_varref.match(line)
        while match:
            pfx, varref, sfx = match.groups()
            # track the character span of this reference within the line
            offset += len(pfx)
            start = offset
            offset += len(varref)
            end = offset
            # strip the leading '$' and normalize to upper case
            varref = varref.upper()[1:]
            refset.add(varref)
            bnch = Bunch.Bunch(varref=varref, lineno=lineno, text=line,
                               start=start, end=end)
            reflist.append(bnch)
            try:
                # probe only; a miss marks this reference as undefined
                res = varDict[varref]
            except KeyError:
                badset.add(varref)
                badlist.append(bnch)
            # continue scanning in the remainder of the line
            match = regex_varref.match(sfx)

    lineno = 0
    for line in buf.split('\n'):
        lineno += 1
        sline = line.strip()
        # classify comment lines by the number of leading '#'s
        if sline.startswith('###'):
            taglist.append(Bunch.Bunch(lineno=lineno, text=line,
                                       tags=['comment3']))
        elif sline.startswith('##'):
            taglist.append(Bunch.Bunch(lineno=lineno, text=line,
                                       tags=['comment2']))
        elif sline.startswith('#'):
            taglist.append(Bunch.Bunch(lineno=lineno, text=line,
                                       tags=['comment1']))
        else:
            # non-comment line: validate any coordinates, then collect
            # variable references
            try:
                check_coords(line)
            except OPEerror as e:
                bnch = Bunch.Bunch(errstr=str(e), lineno=lineno,
                                   text=line)
                badcoords.append(bnch)
            addvarrefs(lineno, line)

    return Bunch.Bunch(refset=refset, reflist=reflist,
                       badset=badset, badlist=badlist,
                       taglist=taglist, vardict=varDict,
                       badcoords=badcoords)
def push(self, params):
    """Push a new caseless rib built from *params* onto the stack top."""
    self.ribs.insert(0, Bunch.caselessDict(params))