self._preparser.errok() else: self._errors.append(error.Error('the unexpected token EOF', len(self._code))) def _process_list(name, node): items = [] if len(node) > 1: if node[1] is not None: items = node[1].children + [node[2]] else: items = [node[2]] node[0] = ast_node.AstNode(name, children=items) def _process_alias(name, node, item=1): node[0] = ast_node.AstNode(name, value=node[item]) if __name__ == '__main__': import read_code import lexer code = read_code.read_code() specific_lexer = lexer.Lexer() preparser = Preparser(specific_lexer) preast = preparser.preparse(code) print(preast) for some_error in specific_lexer.get_errors() + preparser.get_errors(): some_error.detect_position(code) print(some_error)
def thread2():
    """Continuously monitor the touch sensor and convert the Morse code
    sequence into a letter index which gets published.
    """
    # read_code drives the sensor loop and invokes the callback once per
    # decoded letter index; the callback publishes it as a single byte.
    read_code(lambda letter: c.publish(topic, bytes((letter, ))))
# Command-line driver: tokenize or pre-parse a script and report errors.
# NOTE(review): `sys` and `process_options` were used without being
# imported in the original — added below to fix the NameError.
import json
import sys

import ast_token_encoder
import builtin_functions
import evaluate
import lexer
import parser
import preparser
import process_options
import read_code


def _process_errors(errors, code):
    """Resolve each error's source position and print it to stderr."""
    for some_error in errors:
        some_error.detect_position(code)
        sys.stderr.write(str(some_error) + '\n')


options = process_options.process_options()
code = read_code.read_code(options.script)
specific_lexer = lexer.Lexer()

if options.target == 'tokens':
    tokens = specific_lexer.tokenize(code)
    _process_errors(specific_lexer.get_errors(), code)
    print(json.dumps(tokens, cls=ast_token_encoder.AstTokenEncoder))
    # Exit status 1 when the lexer reported any error, 0 otherwise.
    sys.exit(1 if specific_lexer.get_errors() else 0)

specific_preparser = preparser.Preparser(specific_lexer)
preast = specific_preparser.preparse(code)
errors = specific_lexer.get_errors() + specific_preparser.get_errors()

if options.target == 'preast':
    _process_errors(errors, code)
    print(preast)
def thread2():
    """Continuously monitor the touch sensor and convert the Morse code
    sequence into a letter index which gets added to the letter queue.
    """
    # read_code blocks, running the sensor-monitoring loop.
    read_code()