def parse(self):
    """
    Interpret records for PV declarations.

    Tokenizes ``self.filename_expanded`` with :class:`TokenLog` and
    dispatches every actionable ``record``/``grecord`` statement to
    :meth:`_parse_record`.  Tokens that do not match the dispatch table
    are skipped.
    """
    tokenLog = TokenLog()
    tokenLog.processFile(self.filename_expanded)
    tok = tokenLog.nextActionable()
    # Dispatch table: token key -> handler method.
    # TODO: 'NAME alias' and 'NAME info' statements are not handled yet
    # (they would map to _parse_alias / _parse_info when implemented).
    actions = {
        'NAME grecord': self._parse_record,
        'NAME record': self._parse_record,
    }
    while tok is not None:
        tk = token_key(tok)
        if tk in actions:
            # handler consumes the statement's tokens from tokenLog
            actions[tk](tokenLog)
        tok = tokenLog.nextActionable()
def parse(self):
    """
    Interpret the template file for database declarations.

    The Python tokenizer makes simple work of parsing database files.
    The TokenLog class interprets the contents according to a few
    simple terms such as NAME, OP, COMMENT, NEWLINE.
    """
    token_log = TokenLog()
    token_log.processFile(self.filename_expanded)
    # statements recognized at this (top) level of a template file
    dispatch = {
        'NAME file': self._parse_file_statement,
        'NAME global': self._parse_globals_statement,
    }
    tok = token_log.nextActionable()
    while tok is not None:
        handler = dispatch.get(token_key(tok))
        if handler is not None:
            # handler consumes the rest of the statement's tokens
            handler(token_log)
        tok = token_log.nextActionable()
def parse(self):
    """
    Analyze this command file.

    Tokenizes ``self.filename_absolute``, then walks the per-line
    analysis in line order.  Lines whose first token is a NAME (or a
    ``<`` redirection) are classified as either a symbol assignment,
    a known command (dispatched through ``self.knownHandlers``), or a
    generic shell command.
    """
    token_log = TokenLog()
    token_log.processFile(self.filename_absolute)
    lines = token_log.lineAnalysis()
    del lines['numbers']
    for _line_number, line in sorted(lines.items()):
        tokens = line['tokens']
        first = tokens[0]
        # Only lines starting with a NAME token or a '<' operator are
        # interesting here; skip everything else.
        if not (line['pattern'].startswith('NAME')
                or token_key(first) == 'OP <'):
            continue
        arg0 = first['tokStr']
        ref = self._make_ref(first, arg0)
        if tokens[1]['tokStr'] == '=':
            # symbol assignment:  name = value
            self.kh_symbol(arg0, tokens, ref)
        elif arg0 in self.knownHandlers:
            # command arg0 has a known handler, call it
            self.knownHandlers[arg0](arg0, tokens, ref)
        else:
            # fall back: treat the line as a shell command
            self.kh_shell_command(arg0, tokens, ref)