def _parse_record(self, tokenLog):
    tok = tokenLog.nextActionable()
    ref = self._make_ref(tok)
    rtype, rname = tokenLog.tokens_to_list()  # just parsing, no need for macros now
    record_object = record.Record(self, rtype, rname, ref)
    self.record_list.append(record_object)

    tok = tokenLog.nextActionable()
    if token_key(tok) == 'OP {':
        tok = tokenLog.nextActionable()  # get record's field definitions
        while token_key(tok) != 'OP }':
            ref = self._make_ref(tok, tok['tokStr'])
            tk = token_key(tok)
            if tk == 'NAME field':
                tok = tokenLog.nextActionable()
                ref = self._make_ref(tok)
                field, value = parse_bracketed_macro_definitions(tokenLog)
                record_object.addFieldPattern(field, value.strip('"'), self, ref)
                # tok = tokenLog.previous()   # backup before advancing below
            elif tk == 'NAME alias':
                self._parse_alias(tokenLog, record_object, ref)
            elif tk == 'NAME info':
                self._parse_info(tokenLog, record_object, ref)
            else:
                tok = tokenLog.getCurrentToken()
                msg = str(ref)
                msg += ' unexpected content: |%s|' % str(tok['tokStr'])
                msg += ' in file: ' + str(ref)
                raise RuntimeError(msg)
            tok = tokenLog.nextActionable()
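# For orientation, a minimal illustration (assumed EPICS database syntax, not taken
# from the project's files) of the kind of declaration _parse_record() walks: the
# record type and name come first, then a brace-delimited body whose field, alias,
# and info entries are dispatched to the handlers above.
#
#   record(ao, "$(P)userCalc1") {
#       field(DESC, "example analog output")
#       field(PREC, "3")
#       alias("$(P)userCalc1Name")
#       info(autosaveFields, "VAL")
#   }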
def parse(self):
    '''interpret records for PV declarations'''
    tokenLog = TokenLog()
    tokenLog.processFile(self.filename_expanded)
    tok = tokenLog.nextActionable()
    actions = {
        # 'NAME alias': self._parse_alias,
        # 'NAME info': self._parse_info,
        'NAME grecord': self._parse_record,
        'NAME record': self._parse_record,
    }
    while tok is not None:
        tk = token_key(tok)
        if tk in actions:
            actions[tk](tokenLog)
        tok = tokenLog.nextActionable()
def parse(self):
    '''
    interpret the template file for database declarations

    The Python tokenizer makes simple work of parsing database files.
    The TokenLog class interprets the contents according to a few
    simple terms such as NAME, OP, COMMENT, NEWLINE.
    '''
    tokenLog = TokenLog()
    tokenLog.processFile(self.filename_expanded)
    tok = tokenLog.nextActionable()
    actions = {
        'NAME file': self._parse_file_statement,
        'NAME global': self._parse_globals_statement,
    }
    while tok is not None:
        tk = token_key(tok)
        if tk in actions:
            actions[tk](tokenLog)
        tok = tokenLog.nextActionable()
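# Note (an inference from the dispatch table above, not a statement from the source):
# only statements whose leading token key is 'NAME file' or 'NAME global' are acted
# on; any other token returned by nextActionable() is simply advanced past.  A
# template such as
#
#   global { P=xxx:, SCANREC=xxx:scan1 }
#   file "$(SSCAN)/sscanApp/Db/scanParms.db" { ... }
#
# therefore reduces to calls to _parse_globals_statement() and _parse_file_statement().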
def parse(self):
    '''analyze this command file'''
    tokenLog = TokenLog()
    tokenLog.processFile(self.filename_absolute)
    lines = tokenLog.lineAnalysis()
    del lines['numbers']
    for _lineNumber, line in sorted(lines.items()):
        isNamePattern = line['pattern'].startswith('NAME')
        tk = token_key(line['tokens'][0])
        if isNamePattern or tk == 'OP <':
            arg0 = line['tokens'][0]['tokStr']
            ref = self._make_ref(line['tokens'][0], arg0)
            if line['tokens'][1]['tokStr'] == '=':
                # this is a symbol assignment
                handler = self.kh_symbol
                handler(arg0, line['tokens'], ref)
            elif arg0 in self.knownHandlers:
                # command arg0 has a known handler, call it
                handler = self.knownHandlers[arg0]
                handler(arg0, line['tokens'], ref)
            else:
                self.kh_shell_command(arg0, line['tokens'], ref)
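# Illustration (hypothetical st.cmd fragment, assuming dbLoadRecords appears in
# self.knownHandlers) of how parse() routes each analyzed line:
#
#   EPICS_BASE=/opt/epics/base              second token is '=', so kh_symbol
#   < envPaths                              first token is 'OP <', same arg0 handler lookup
#   dbLoadRecords("db/x.db", "P=xxx:")      arg0 found in knownHandlers, its handler is called
#   date                                    no handler matches, falls back to kh_shell_command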
def _parse_globals_statement(self, tokenLog):
    '''
    support the *global* statement in a template file

    This statement was new starting with EPICS base 3.15.

    example::

        global { P=12ida1:,SCANREC=12ida1:scan1 }
    '''
    ref = self._make_ref(tokenLog.getCurrentToken(), 'global macros')
    # TODO: How to remember where the globals were defined?
    tok = tokenLog.nextActionable()
    if token_key(tok) == 'OP {':
        kv = parse_bracketed_macro_definitions(tokenLog)
        ref = self._make_ref(tok, kv)
        # TODO: Do something with ref
        self.macros.setMany(**kv)
    else:
        msg = '(%s,%d,%d) ' % (self.filename, tok['start'][0], tok['start'][1])
        msg += 'missing "{" in global statement'
        raise DatabaseTemplateException(msg)
def _parse_file_statement(self, tokenLog):
    '''
    support the *file* statement in a template file

    example::

        file "$(SSCAN)/sscanApp/Db/scanParms.db"
    '''
    ref = self._make_ref(tokenLog.getCurrentToken(), 'database file')
    # TODO: Do something with ref

    # if no enclosing quotes, '$' is an ERRORTOKEN
    skip_list = 'COMMENT NEWLINE ENDMARKER INDENT DEDENT'.split()
    tok = tokenLog.nextActionable(skip_list)  # move past the "file" command
    tk = token_key(tok)
    text = ''
    while tk != 'OP {':
        text += tok['tokStr']
        tok = tokenLog.nextActionable(skip_list)
        tk = token_key(tok)
    dbFileName = utils.strip_outer_quotes(text.strip())
    fname = self.macros.replace(dbFileName)

    tok = tokenLog.nextActionable()

    # When there is a "pattern" statement, the macro labels are given first,
    # then (later) the values in each declaration (usually multiple sets).
    pattern_keys = []
    if token_key(tok) == 'NAME pattern':
        tok = tokenLog.nextActionable()
        pattern_keys = tokenLog.tokens_to_list()
        tok = tokenLog.nextActionable()  # skip past the closing }

    while token_key(tok) != 'OP }':
        # define the macros for this set
        pattern_macros = macros.Macros()
        if len(pattern_keys) > 0:
            # the macro labels were defined in a pattern statement
            value_list = tokenLog.tokens_to_list()
            kv = dict(zip(pattern_keys, value_list))
            pattern_macros.setMany(**kv)
            tok = tokenLog.nextActionable()
        else:
            # no pattern statement, macro labels are defined with the values
            tok = tokenLog.getCurrentToken()
            kv = tokenLog.getKeyValueSet()
            pattern_macros.setMany(**kv)
            tok = tokenLog.nextActionable()
        ref = self._make_ref(tokenLog.getCurrentToken())

        # TODO: work out how to get the path into the next statement
        arg_text = '("' + dbFileName + '")'
        if len(pattern_macros) > 0:
            arg_text = '("' + dbFileName
            arg_text += ', "'
            arg_text += ','.join([k + '=' + v for k, v in sorted(pattern_macros.items())])
            arg_text += '")'
        pattern_macros.setMany(**self.macros.db)
        cmd = command_file.Command(
            self, '(dbLoadRecords)', 'path unknown', arg_text, ref,
            **pattern_macros.db)
        self.commands.append(cmd)

        dbg = database.Database(self, fname, ref, **pattern_macros.db)
        self.database_list.append(dbg)
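# For reference, a substitutions-file fragment (assumed from standard EPICS template
# conventions, not quoted from the source) showing the two forms handled above: with
# a pattern statement the macro labels come first and each set lists only values;
# without one, each set carries its own label=value pairs.
#
#   file "$(SSCAN)/sscanApp/Db/scanParms.db"
#   {
#       pattern {P,     SCANREC}
#               {xxx:,  xxx:scan1}
#               {xxx:,  xxx:scan2}
#   }
#
#   file "$(SSCAN)/sscanApp/Db/scanParms.db"
#   {
#       {P=xxx:, SCANREC=xxx:scan1}
#   }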