class ScannerTestCase(TestCase):
    def setUp(self):
        self.servers = [Mock(), Mock()]
        self.scanner = Scanner(servers=self.servers)

    @patch('scanner.api.user.add_user')
    def test_block_add(self, add_user):
        self.scanner.add_user('mike', 'test')
        self.assertFalse(add_user.called)

    @patch('scanner.api.user.add_user')
    def test_unblock_add(self, add_user):
        scanner = Scanner(servers=self.servers, add_users=True)
        scanner.add_user('mike', 'test')
        add_user.assert_called_once_with('mike', 'test')

    @patch('scanner.api.user.remove_user')
    def test_block_remove(self, remove_user):
        self.scanner.remove_user('mike', 'test')
        self.assertFalse(remove_user.called)

    @patch('scanner.api.user.remove_user')
    def test_unblock_remove(self, remove_user):
        scanner = Scanner(servers=self.servers, remove_users=True)
        scanner.remove_user('mike', 'test')
        remove_user.assert_called_once_with('mike', 'test')
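# A minimal, self-contained sketch (standard library only) of the flag-gated
# mocking pattern exercised above: the collaborator is replaced by a mock and
# the tests assert it is invoked only when the feature flag is enabled.
# The `Service` class below is hypothetical, not part of the project under test.
from unittest import TestCase
from unittest.mock import MagicMock


class Service:
    def __init__(self, add_users=False):
        self.add_users = add_users
        self.backend = MagicMock()

    def add_user(self, name, key):
        if self.add_users:
            self.backend.add_user(name, key)


class FlagGateTestCase(TestCase):
    def test_blocked_when_flag_off(self):
        service = Service(add_users=False)
        service.add_user('mike', 'test')
        self.assertFalse(service.backend.add_user.called)

    def test_forwarded_when_flag_on(self):
        service = Service(add_users=True)
        service.add_user('mike', 'test')
        service.backend.add_user.assert_called_once_with('mike', 'test')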
def debug_scan():
    from scanner.scanner import Scanner
    s = Scanner(debug=True, add_users=True, remove_users=True,
                add_keys=True, remove_keys=True)
    results = s.scan()
def getinfo(self, info):
    result = {}
    port_list = [self.get_arguments('ports'), info.split(',')][len(info) > 0]
    if len(port_list) == 0:
        result = Scanner.getallservices()
    else:
        result = Scanner.getsomeservices(port_list)
    return result
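# Note on the selection expression above: `[a, b][condition]` evaluates both
# alternatives eagerly and then indexes the two-element list with the boolean
# (False == 0, True == 1). The values below are illustrative only.
defaults = ['22', '80']
info = '443,8080'
port_list = [defaults, info.split(',')][len(info) > 0]
assert port_list == ['443', '8080']
# Equivalent (but lazily evaluated) conditional expression:
assert port_list == (info.split(',') if len(info) > 0 else defaults)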
def post(self):
    """Kick off a scan manually"""
    s = Scanner(debug=False, add_users=True, remove_users=True,
                add_keys=True, remove_keys=True)
    results = s.scan()
    return '', HTTP_STATUS.CREATED
import os


def generate_protocol(input_xml, output_path):
    # Import the scanner directly so the protocol can be built before the
    # cffi module has been compiled
    from scanner.scanner import Scanner

    # Ensure the protocol dir exists
    if not os.path.isdir(output_path):
        os.makedirs(output_path, 0o775)

    # Run and scan the xml file
    scanner = Scanner(input_xml)
    scanner.scan()
    scanner.output(output_path)
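# Hypothetical driver for generate_protocol(); the XML path and output
# directory below are illustrative placeholders, not values from the project.
if __name__ == '__main__':
    generate_protocol('protocols/wayland.xml', 'build/protocol')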
def capture(self):
    self.ids['camera'].play = False
    camera = self.ids['camera']
    print(camera.size, Window.size)
    path_to_img = os.path.join(
        os.path.abspath('.'),
        "src/IMG_{}.png".format(time.strftime("%Y%m%d_%H%M%S")))
    camera.export_to_png(path_to_img)
    print("Captured", path_to_img)
    Scanner().scan(path_to_img)
    self.layout.new_page(ContourPage(self.layout, path_to_img))
def run(source: str, interpreter: Interpreter) -> None:
    scanner = Scanner(source)
    scanner.scan_tokens()
    if scanner.had_error:
        print('Error scanning')
        return

    parser = Parser(scanner.tokens)
    statements = parser.parse()
    if parser.had_error:
        print('Error parsing')
        for err in parser.errors:
            parser_error_print(err, scanner.source_lines)
    else:
        resolve_statements(statements)
        try:
            interpreter.interpret(statements)
        except LoxRuntimeError as err:
            print(err.msg)
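# A minimal REPL-style driver for run() above. It assumes the Interpreter,
# Scanner and Parser imports are available in the surrounding module, which
# is not shown in the snippet.
def repl(interpreter: Interpreter) -> None:
    while True:
        try:
            line = input('> ')
        except EOFError:
            break
        run(line, interpreter)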
def main():
    print("\nStarting compiler...\n\n")
    file = None
    try:
        if len(sys.argv) > 1:
            file = open(sys.argv[1], 'r')
        else:
            file = open('tests/program.po', 'r')
    except IOError:
        print("Error: File does not appear to exist.")
        return

    scanner = Scanner(file)
    scanner.scan()
    tokens = scanner.tokens_list
    print(*tokens, sep='\n')
    print("\n")

    parser = LL1(tokens)
    parser.parse()
def build_scanner(path):
    start = DFANode(actions.error_gen)
    number_regex(start)
    id_regex(start)
    symbol_regex(start)
    comment_regex(start)
    whitespace_regex(start)
    language = Edge().include('0', '9').include('a', 'z').include('A', 'Z') \
        .include('/').include('*') \
        .include(':', '<').include(',').include('(', ')') \
        .include('[').include(']').include('{').include('}') \
        .include('+').include('-').include('=') \
        .include('\t', '\r').include(' ').include(chr(26))
    return Scanner(start, BufferReader(path, 30), language)
class TestParser(unittest.TestCase):
    def setUp(self):
        self.file = open('tests/program-old.po', 'r')
        self.scanner = Scanner(self.file)
        self.scanner.scan()
        self.tokens = self.scanner.tokens_list
        self.parser = LL1(self.tokens)

    def tearDown(self):
        self.file.close()

    def test_eoftoken(self):
        self.assertEqual(self.tokens[-1].token, TokenEnum.EOF)

    def test_keywords(self):
        self.assertEqual(len(self.parser.keywords), 8)

    def test_index(self):
        self.assertEqual(self.parser.index, 0)

    def test_parser(self):
        result = self.parser.parse()
        self.assertEqual(result, 1)
def main(argv):
    config_file = 'config/config.yml'

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-c", "--config",
        help="Override the default config file. By default: config/config.yml")
    args = parser.parse_args()

    if args.config:
        if os.access(args.config, os.R_OK):
            config_file = args.config
        else:
            print("ERROR: '{}' is not readable, please check the path".format(args.config))
            exit(1)

    # Init the scanner
    scanner = Scanner(config_path=config_file)
    # Start the scanner
    scanner.start_worker()
def main():
    scanner = Scanner(
        dfa_file=config.DFA_JSON_FILE,
        keywords_file=config.KEYWORDS_FILE,
        input_file=config.INPUT_FILE,
        errors_file=config.LEXICAL_ERRORS_FILE,
        tokens_file=config.TOKENS_FILE,
        symbols_file=config.SYMBOL_TABLE_FILE,
    )
    parser = Parser(
        scanner=scanner,
        grammar_file=config.GRAMMAR_FILE,
        tree_file=config.TREE_FILE,
        errors_file=config.PARSER_ERRORS_FILE,
        code_file=config.INTERMEDIATE_CODE_FILE,
    )
    parser.run()
    parser.dump_data()
def main(location, verbose=False):
    scanner = Scanner(location, verbose=verbose)
    scanner.start()
def scan():
    from scanner.scanner import Scanner
    s = Scanner()
    s.scan()
class Parser(object):
    """Parser"""

    def __init__(self, filename, src):
        self.file = token.File(filename)
        self.scanner = Scanner(self.file, src)
        self.pos = None
        self.tok = None
        self.lit = None
        self.next_token()

    def skip(self, tok):
        """Skip over the expected token."""
        print("skip....", tok)
        if self.tok == tok:
            self.next_token()
        else:
            self.error("bad skip...", self.tok, tok)  # unexpected token

    def next_token(self):
        """Fetch the next token."""
        print("next_token...")
        self.pos, self.tok, self.lit = self.scanner.scan()
        print('next_token--------------------', self.pos, self.tok, self.lit)

    def error(self, *args):
        """Report an error and stop."""
        print("error....", self.pos, self.tok, self.lit, args)
        exit(1)

    def parse_file(self):
        """Parse a whole file."""
        print('parse_file.....')
        file_node = ast.File()
        while self.tok == token.tk_newline:
            self.skip(token.tk_newline)
        while self.tok != token.EOF:
            node = self.statement()
            file_node.append_statements(node)
            while self.tok == token.tk_newline:
                self.skip(token.tk_newline)
        if self.tok != token.EOF:
            # A complete expression was parsed but the input did not end.
            self.error("bad end...")
        print("File... >>", file_node)
        return file_node

    def statement(self):
        """Statement."""
        print("statement...")
        node = self.compound_statement()
        print("statement...>>", node)
        return node

    def compound_statement(self):
        """Compound statement."""
        print("compound_statement...")
        node = ast.CompoundStatement()
        node1 = self.simple_statement()
        node.append_simple_statement(node1)
        while self.tok == token.tk_semicolon:
            self.skip(token.tk_semicolon)
            if self.tok == token.EOF:
                break
            if self.tok == token.tk_newline:
                break
            node1 = self.simple_statement()
            node.append_simple_statement(node1)
        print("compound_statement...>>", node)
        return node

    def simple_statement(self):
        """Simple statement."""
        print("simple_statement...")
        if self.tok == token.kw_print:
            node = self.print_statement()
        else:
            node = self.expression_statement()
            if isinstance(node, ast.Identifier):
                if self.tok == token.tk_assign:
                    self.skip(token.tk_assign)
                    node2 = self.expression_statement()
                    node = ast.AssignmentStatement(node, node2)
        print("simple_statement...>>", node)
        return node

    def print_statement(self):
        """print statement."""
        self.skip(token.kw_print)
        self.skip(token.tk_left_parenthesis)
        node = self.expression_list()
        self.skip(token.tk_right_parenthesis)
        return ast.PrintStatement(node)

    def expression_list(self):
        """Expression list."""
        node = ast.ExpressionList()
        node1 = self.expression_statement()
        node.append_expression(node1)
        while self.tok == token.tk_comma:
            self.skip(token.tk_comma)
            node1 = self.expression_statement()
            node.append_expression(node1)
        return node

    def expression_statement(self):
        """Expression statement."""
        print("expression_statement...")
        node = self.expression()
        print("expression_statement...>>", node)
        return node

    def expression(self):
        """Expression."""
        return self.boolean_expression()

    def boolean_expression(self):
        """Boolean expression."""
        return self.or_operation_expression()

    def or_operation_expression(self):
        """or expression."""
        node = self.and_operation_expresion()
        while self.tok == token.kw_or:
            self.skip(token.kw_or)
            node2 = self.and_operation_expresion()
            node = ast.OrExpression(node, node2)
        return node

    def and_operation_expresion(self):
        """and expression."""
        node = self.not_operation_expression()
        while self.tok == token.kw_and:
            self.skip(token.kw_and)
            node2 = self.not_operation_expression()
            node = ast.AndExpression(node, node2)
        return node

    def not_operation_expression(self):
        """not expression."""
        if self.tok == token.kw_not:
            self.skip(token.kw_not)
            node2 = self.not_operation_expression()
            node = ast.NotExpression(node2)
        else:
            node = self.comparison_expression()
        return node

    def comparison_expression(self):
        """Comparison expression."""
        print("comparison_expression...")
        node = self.binary_operation_expression()
        while self.tok in (
                token.tk_equal, token.tk_not_equal,
                token.tk_less_than, token.tk_less_than_or_equal,
                token.tk_greater_than, token.tk_greater_than_or_equal,
                token.kw_is, token.kw_in,
        ):
            tok2 = self.tok
            self.skip(tok2)
            print('tok2', tok2)
            node2 = self.binary_operation_expression()
            print('tok2', tok2)
            if tok2 == token.tk_equal:
                node = ast.EqualExpression(node, node2)
            elif tok2 == token.tk_not_equal:
                node = ast.NotEqualExpression(node, node2)
            elif tok2 == token.tk_less_than:
                node = ast.LessThanExpression(node, node2)
            elif tok2 == token.tk_less_than_or_equal:
                node = ast.LessThanOrEqualExpression(node, node2)
            elif tok2 == token.tk_greater_than:
                node = ast.GreaterThanExpression(node, node2)
            elif tok2 == token.tk_greater_than_or_equal:
                node = ast.GreaterThanOrEqualExpression(node, node2)
            elif tok2 == token.kw_is:
                node = ast.IsExpression(node, node2)
            elif tok2 == token.kw_in:
                node = ast.InExpression(node, node2)
            else:
                self.error("comparison_expression unexpected tok", tok2)
        print("comparison_expression...>>", node)
        return node

    def binary_operation_expression(self):
        """Binary operation expression."""
        print("binary_operation_expression...")
        node = self.relational_expression()
        print("binary_operation_expression...>>", node)
        return node

    def relational_expression(self):
        """Addition/subtraction expression."""
        print("relational_expression....")
        node = self.multiplicative_expression()
        while self.tok == token.tk_plus or self.tok == token.tk_minus_sign:
            tok1 = self.tok
            self.skip(tok1)
            node2 = self.multiplicative_expression()
            if tok1 == token.tk_plus:
                node = ast.PlusExpression(node, node2)
            elif tok1 == token.tk_minus_sign:
                node = ast.MinusSignExpression(node, node2)
            else:
                raise Exception("relational_expression unexpected token", tok1)
        print("relational_expression...>", node)
        return node

    def multiplicative_expression(self):
        """Multiplication/division expression."""
        print("multiplicative_expression....")
        node = self.unary_expression()
        while self.tok == token.tk_divide or self.tok == token.tk_star:
            tok1 = self.tok
            self.next_token()
            node2 = self.unary_expression()
            if tok1 == token.tk_divide:
                node = ast.DivideExpression(node, node2)
            elif tok1 == token.tk_star:
                node = ast.StarExpression(node, node2)
            else:
                raise Exception("multiplicative_expression unexpected tok1", tok1)
        print("multiplicative_expression...>", node)
        return node

    def unary_expression(self):
        """Unary expression."""
        print("unary_expression....")
        if self.tok == token.tk_plus or self.tok == token.tk_minus_sign:  # + -
            tok1 = self.tok
            self.next_token()
            node = self.unary_expression()
            node = ast.UnaryExpression(tok1, node)
        else:
            node = self.atom()
        print("unary_expression...>", node)
        return node

    def atom(self):
        """Atom."""
        print("atom....", self.tok, self.lit)
        if self.tok == token.tk_left_parenthesis:  # (
            self.skip(token.tk_left_parenthesis)
            node = self.boolean_expression()
            node = ast.ParenthForm(node)
            self.skip(token.tk_right_parenthesis)
        elif self.tok == token.tk_left_middle_bracket:  # [
            self.skip(token.tk_left_middle_bracket)
            node = self.expression_list()
            node = ast.ListDisplay(node)
            self.skip(token.tk_right_middle_bracket)
        elif self.tok == token.tk_identifier:
            node = ast.Identifier(self.pos, self.tok, self.lit)
            self.next_token()
            if self.tok == token.tk_left_parenthesis:
                self.skip(token.tk_left_parenthesis)
                node2 = self.expression_list()
                node = ast.Call(node, node2)
                self.skip(token.tk_right_parenthesis)
        elif self.tok == token.tk_string:
            node = ast.StringLiteral(self.pos, self.tok, self.lit)
            self.next_token()
        elif self.tok == token.tk_floatnumber:
            node = ast.FloatNumber(self.pos, self.tok, self.lit)
            self.next_token()
        elif self.tok == token.tk_integer:
            node = ast.Integer(self.pos, self.tok, self.lit)
            self.next_token()
        else:
            node = None
            self.error('atom unexpected ', self.tok, self.lit, self.tok)
        return node
class TestScanner(unittest.TestCase):
    def setUp(self):
        self.file = open('tests/program-old.po', 'r')
        self.scanner = Scanner(self.file)

    def tearDown(self):
        self.file.close()

    def test_get(self):
        self.assertEqual(self.scanner.get(), 'I')
        self.scanner.peek()
        self.assertEqual(self.scanner.get(), 'N')
        self.assertEqual(self.scanner.get(), 'T')
        self.assertEqual(self.scanner.get(), 'E')
        self.assertEqual(self.scanner.get(), 'I')
        self.scanner.peek()
        self.assertEqual(self.scanner.get(), 'R')
        self.assertEqual(self.scanner.get(), 'O')

    def test_peek(self):
        self.assertEqual(self.scanner.peek(), 'I')
        self.assertEqual(self.scanner.peek(), 'I')
        self.assertEqual(self.scanner.peek(), 'I')
        self.scanner.get()
        self.assertEqual(self.scanner.peek(), 'N')
        self.assertEqual(self.scanner.peek(), 'N')

    def test_getline(self):
        self.assertEqual(self.scanner.get_line(), 'INTEIRO: numero1;\n')
        self.assertEqual(self.scanner.get_line(), '\n')

    def test_peekline(self):
        self.assertEqual(self.scanner.peek_line(), 'INTEIRO: numero1;\n')
        self.assertEqual(self.scanner.peek_line(), 'INTEIRO: numero1;\n')

    def test_islinecomment(self):
        self.assertEqual(self.scanner.is_linecomment(), False)

    def test_eof(self):
        while not self.scanner.is_eof():
            self.scanner.get()
        self.assertTrue(self.scanner.is_eof())

    def test_scan(self):
        tokens = self.scanner.scan()
        self.assertEqual(len(tokens[TokenEnum.TYPE]), 4)
        self.assertEqual(len(tokens[TokenEnum.INTEGER]), 5)
        self.assertEqual(len(tokens[TokenEnum.COLON]), 4)
        self.assertEqual(len(tokens[TokenEnum.COMMA]), 1)
        self.assertEqual(len(tokens[TokenEnum.IDENTIFIER]), 15)
        self.assertEqual(len(tokens[TokenEnum.SEMICOLON]), 9)
        self.assertEqual(len(tokens[TokenEnum.ASSIGNMENT]), 6)
        self.assertEqual(len(tokens[TokenEnum.KEYWORD]), 3)
        self.assertEqual(len(tokens[TokenEnum.TEXT]), 1)
        self.assertEqual(len(tokens[TokenEnum.FLOAT]), 1)
        self.assertEqual(len(tokens[TokenEnum.ARITHMETIC]), 2)

    def test_token_list(self):
        self.scanner.scan()
        self.assertEqual(len(self.scanner.tokens_list), 51)
import sys


def merge_dicts(*dict_args):
    result = {}
    for dictionary in dict_args:
        result.update(dictionary)
    return result


if __name__ == "__main__":
    source_code_path = None
    try:
        source_code_path = sys.argv[1]
    except IndexError:
        print('ERROR: You must provide a file to scan as an argument to the interpreter')
        sys.exit()

    scanned_program = Scanner(source_code_path)
    print('Program Scanned.')

    parser = Parser(scanned_program)
    print('Program Parsed.')
    print(parser.postfix_list)

    interpreter_dict = merge_dicts(
        dict((v, k) for k, v in scanned_program.identifier_dict.items()),
        Grammar.flattenToValueKeyed()
    )
    print(interpreter_dict)
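# Quick check of merge_dicts(): later dictionaries win on key collisions,
# mirroring dict.update() semantics.
assert merge_dicts({'a': 1, 'b': 2}, {'b': 3}) == {'a': 1, 'b': 3}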
import os
import sys

sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "..", "lib"))

from scanner.scanner import Scanner

SPLUNK_HOME = os.environ.get('SPLUNK_HOME', '')
LOG_PATH = os.path.join(SPLUNK_HOME, 'var', 'log', 'vulners-lookup')

s = Scanner(log_level="DEBUG", log_path=LOG_PATH)
s.run()
def __init__(self, scanner: Scanner):
    self.parser = yacc.yacc()
    self.lexer = scanner.get_lexer()
from scanner.scanner import Scanner

s = Scanner('/Users/amirhasan/desktop/compiler/project/compiler/input.txt')
while True:
    t = s.next()
    if t:
        print(t)
    else:
        break
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("-sT", "--tcp", action="store_true", help="TCP Connect scan")
    parser.add_argument("-sS", "--syn", action="store_true", help="TCP Syn scan")
    parser.add_argument("-sN", "--null", action="store_true", help="TCP Null scan")
    parser.add_argument("-sF", "--fin", action="store_true", help="TCP Fin scan")
    parser.add_argument("-sX", "--xmas", action="store_true", help="TCP XMas scan")
    parser.add_argument("-sA", "--ack", action="store_true", help="TCP Ack scan")
    parser.add_argument("-sU", "--udp", action="store_true", help="UDP scan")
    parser.add_argument("-p", "--ports", nargs="*")
    parser.add_argument("target")
    args = parser.parse_args(sys.argv[1:])

    if args.ports and args.ports != "top":
        ports = utils.parse_ports(args.ports)
    else:
        ports = utils.get_top_ports(1000)

    scanner = Scanner(args.target, ports)
    if args.tcp:
        scanner.scan(technique="tcp")
    elif args.syn:
        scanner.scan(technique="syn")
    elif args.null:
        scanner.scan(technique="null")
    elif args.fin:
        scanner.scan(technique="fin")
    elif args.xmas:
        scanner.scan(technique="xmas")
    elif args.ack:
        scanner.scan(technique="ack")
    if args.udp:
        scanner.scan(technique="udp")
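# Example invocations (the script name is a placeholder):
#   python portscan.py -sS 192.0.2.10               # SYN scan of the top 1000 ports
#   python portscan.py -sT 192.0.2.10 -p 22 80 443  # connect scan of selected ports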
class Parser(object):
    """Parser"""

    def __init__(self, filename, src):
        self.file = token.File(filename)
        self.scanner = Scanner(self.file, src)
        self.token = None
        self.line_num = 0
        self.next_token()

    @property
    def pos(self):
        return self.token.pos

    @property
    def tok(self):
        return self.token.tok

    @property
    def lit(self):
        return self.token.lit

    def skip_newlines(self):
        while self.tok == token.tk_newline:
            self.skip(token.tk_newline)
            self.line_num += 1

    def skip(self, tok):
        """Skip over the expected token."""
        print("parser.skip....", tok)
        if self.tok == tok:
            self.next_token()
        else:
            self.error("parser.skip bad skip...", self.tok, tok)  # unexpected token

    def next_token(self):
        """Fetch the next token."""
        print("next_token...")
        self.token = self.scanner.scan()
        print('parser.next_token--------------------', self.pos, self.tok, self.lit)

    def error(self, *args):
        """Report an error and stop."""
        print("Parser.error >>", self.pos, ", ", self.tok, ", ", self.lit, ",", *args)
        exit(1)

    def parse_file(self):
        """Parse a whole file."""
        print('parse_file.....')
        file_node = ast.File()
        self.skip_newlines()
        while self.tok != token.EOF:
            node = self.statement()
            file_node.append_statements(node)
            self.skip_newlines()
        if self.tok != token.EOF:
            # A complete expression was parsed but the input did not end.
            self.error("parse_file >> bad end...")
        print("File... >>", file_node)
        return file_node

    def statement(self):
        """Statement."""
        print("statement...")
        node = self.compound_statement()
        return node

    def compound_statement(self):
        """Compound statement."""
        print("compound_statement...")
        self.skip_newlines()
        if self.tok == token.kw_def:
            node = self.function_def_statement()
        elif self.tok == token.kw_if:
            node = self.if_statement()
        elif self.tok == token.kw_for:
            node = self.for_statement()
        else:
            node = self.simple_statement()
        self.skip_newlines()
        return node

    def function_def_statement(self):
        """Function definition statement."""
        print("function_def_statement....", self.tok, self.lit)
        self.skip(token.kw_def)
        ident = ast.Identifier(self.pos, self.tok, self.lit)
        self.next_token()
        self.skip(token.tk_left_parenthesis)
        param_list = self.param_list()
        self.skip(token.tk_right_parenthesis)
        block = self.statement_block()
        node = ast.DefStatement(ident, param_list, block)
        print("function_def_statement....>>", node)
        return node

    def param_list(self):
        """Formal parameter list of a function."""
        print("param_list....", self.tok, self.lit)
        node = ast.ParamList()
        if self.tok == token.tk_identifier:
            node.append_identifier(ast.Identifier(self.pos, self.tok, self.lit))
            self.next_token()
            while self.tok == token.tk_comma:
                self.skip(token.tk_comma)
                if self.tok == token.tk_identifier:
                    node.append_identifier(
                        ast.Identifier(self.pos, self.tok, self.lit))
                    self.next_token()
                else:
                    break
        print("param_list...>", node)
        return node

    def statement_block(self):
        """Statement block."""
        print("statement_block....", self.tok, self.lit)
        self.skip(token.tk_left_braces)
        self.skip_newlines()
        node = ast.StatementBlock()
        while self.tok != token.tk_right_braces:
            node1 = self.statement()
            node.append_statement(node1)
        self.skip(token.tk_right_braces)
        return node

    def for_statement(self):
        """for statement."""
        print("for_statement....", self.tok, self.lit)
        self.skip(token.kw_for)
        expression = self.expression_statement()
        block = self.statement_block()
        node = ast.ForStatement(expression, block)
        return node

    def if_statement(self):
        """if statement."""
        print("if_statement....", self.tok, self.lit)
        self.skip(token.kw_if)
        node = ast.IfStatement()
        expression = self.expression_statement()
        block = self.statement_block()
        node.append_elif(expression, block)
        while self.tok == token.kw_elif:
            self.skip(token.kw_elif)
            expression = self.expression_statement()
            block = self.statement_block()
            node.append_elif(expression, block)
        if self.tok == token.kw_else:
            self.skip(token.kw_else)
            block = self.statement_block()
            node.set_else_block(block)
        return node

    def simple_statement(self):
        """Simple statement."""
        print("simple_statement...")
        node = ast.SimpleStatement()
        node1 = self.small_statement()
        node.append_small_statement(node1)
        while self.tok == token.tk_semicolon:
            self.skip(token.tk_semicolon)
            if self.tok == token.EOF:
                break
            if self.tok == token.tk_newline:
                break
            node1 = self.small_statement()
            node.append_small_statement(node1)
        print("simple_statement...>>", node)
        return node

    def small_statement(self):
        """Small statement."""
        print("small_statement...")
        if self.tok == token.kw_print:
            node = self.print_statement()
        elif self.tok == token.kw_import:
            node = self.import_statement()
        elif self.tok == token.kw_return:
            node = self.return_statement()
        elif self.tok == token.kw_break:
            node = self.break_statement()
        elif self.tok == token.kw_continue:
            node = self.continue_statement()
        else:
            node = self.expression_statement()
            if isinstance(node, ast.Identifier) or isinstance(node, ast.PeriodForm):
                if self.tok == token.tk_assign:
                    self.skip(token.tk_assign)
                    node2 = self.expression_statement()
                    node = ast.AssignmentStatement(node, node2)
        print("small_statement...>>", node)
        return node

    def import_statement(self):
        """import statement."""
        print("import statement...")
        self.skip(token.kw_import)
        node = self.path_statement()
        if self.tok == token.kw_as:
            self.skip(token.kw_as)
            if self.tok == token.tk_identifier:
                identifier = ast.Identifier(self.pos, self.tok, self.lit)
            else:
                self.error("as error")
                return
        else:
            identifier = None
        print("import statement ....>>")
        return ast.ImportStatement(node, identifier)

    def path_statement(self):
        """Dotted import path."""
        print("path statement....")
        identifier_list = []
        identifier = ast.Identifier(self.pos, self.tok, self.lit)
        identifier_list.append(identifier)
        self.next_token()
        while self.tok == token.tk_period:
            self.skip(token.tk_period)
            if self.tok == token.tk_identifier:
                identifier = ast.Identifier(self.pos, self.tok, self.lit)
                identifier_list.append(identifier)
                self.next_token()
            else:
                self.error("path statement...")
        print("path statement....>>")
        return ast.PathStatement(identifier_list)

    def return_statement(self):
        """return statement."""
        self.skip(token.kw_return)
        if self.tok == token.tk_semicolon or self.tok == token.tk_newline:
            node = None
        else:
            node = self.expression()
        node = ast.ReturnStatement(node)
        return node

    def continue_statement(self):
        """continue statement."""
        node = ast.ContinueStatement()
        self.skip(token.kw_continue)
        return node

    def break_statement(self):
        """break statement."""
        node = ast.BreakStatement()
        self.skip(token.kw_break)
        return node

    def print_statement(self):
        """print statement."""
        self.skip(token.kw_print)
        self.skip(token.tk_left_parenthesis)
        node = self.expression_list()
        self.skip(token.tk_right_parenthesis)
        return ast.PrintStatement(node)

    def expression_list(self):
        """Expression list."""
        node = ast.ExpressionList()
        node1 = self.expression_statement()
        node.append_expression(node1)
        while self.tok == token.tk_comma:
            self.skip(token.tk_comma)
            node1 = self.expression_statement()
            node.append_expression(node1)
        return node

    def expression_statement(self):
        """Expression statement."""
        print("expression_statement...")
        node = self.expression()
        print("expression_statement...>>", node)
        return node

    def expression(self):
        """Expression."""
        return self.boolean_expression()

    def boolean_expression(self):
        """Boolean expression."""
        return self.or_operation_expression()

    def or_operation_expression(self):
        """or expression."""
        node = self.and_operation_expresion()
        while self.tok == token.kw_or:
            self.skip(token.kw_or)
            node2 = self.and_operation_expresion()
            node = ast.OrExpression(node, node2)
        return node

    def and_operation_expresion(self):
        """and expression."""
        node = self.not_operation_expression()
        while self.tok == token.kw_and:
            self.skip(token.kw_and)
            node2 = self.not_operation_expression()
            node = ast.AndExpression(node, node2)
        return node

    def not_operation_expression(self):
        """not expression."""
        if self.tok == token.kw_not:
            self.skip(token.kw_not)
            node2 = self.not_operation_expression()
            node = ast.NotExpression(node2)
        else:
            node = self.comparison_expression()
        return node

    def comparison_expression(self):
        """Comparison expression."""
        print("comparison_expression...")
        node = self.binary_operation_expression()
        while self.tok in (
                token.tk_equal, token.tk_not_equal,
                token.tk_less_than, token.tk_less_than_or_equal,
                token.tk_greater_than, token.tk_greater_than_or_equal,
                token.kw_is, token.kw_in,
        ):
            tok2 = self.tok
            self.skip(tok2)
            print('tok2', tok2)
            node2 = self.binary_operation_expression()
            print('tok2', tok2)
            if tok2 == token.tk_equal:
                node = ast.EqualExpression(node, node2)
            elif tok2 == token.tk_not_equal:
                node = ast.NotEqualExpression(node, node2)
            elif tok2 == token.tk_less_than:
                node = ast.LessThanExpression(node, node2)
            elif tok2 == token.tk_less_than_or_equal:
                node = ast.LessThanOrEqualExpression(node, node2)
            elif tok2 == token.tk_greater_than:
                node = ast.GreaterThanExpression(node, node2)
            elif tok2 == token.tk_greater_than_or_equal:
                node = ast.GreaterThanOrEqualExpression(node, node2)
            elif tok2 == token.kw_is:
                node = ast.IsExpression(node, node2)
            elif tok2 == token.kw_in:
                node = ast.InExpression(node, node2)
            else:
                self.error("comparison_expression unexpected tok", tok2)
        print("comparison_expression...>>", node)
        return node

    def binary_operation_expression(self):
        """Binary operation expression."""
        print("binary_operation_expression...")
        node = self.relational_expression()
        print("binary_operation_expression...>>", node)
        return node

    def relational_expression(self):
        """Addition/subtraction expression."""
        print("relational_expression....")
        node = self.multiplicative_expression()
        while self.tok == token.tk_plus or self.tok == token.tk_minus_sign:
            tok1 = self.tok
            self.skip(tok1)
            node2 = self.multiplicative_expression()
            if tok1 == token.tk_plus:
                node = ast.PlusExpression(node, node2)
            elif tok1 == token.tk_minus_sign:
                node = ast.MinusSignExpression(node, node2)
            else:
                raise Exception("relational_expression unexpected token", tok1)
        print("relational_expression...>", node)
        return node

    def multiplicative_expression(self):
        """Multiplication/division expression."""
        print("multiplicative_expression....")
        node = self.unary_expression()
        while self.tok == token.tk_divide or self.tok == token.tk_star:
            tok1 = self.tok
            self.next_token()
            node2 = self.unary_expression()
            if tok1 == token.tk_divide:
                node = ast.DivideExpression(node, node2)
            elif tok1 == token.tk_star:
                node = ast.StarExpression(node, node2)
            else:
                raise Exception("multiplicative_expression unexpected tok1", tok1)
        print("multiplicative_expression...>", node)
        return node

    def unary_expression(self):
        """Unary expression."""
        print("unary_expression....")
        if self.tok == token.tk_plus or self.tok == token.tk_minus_sign:  # + -
            tok1 = self.tok
            self.next_token()
            node = self.unary_expression()
            node = ast.UnaryExpression(tok1, node)
        else:
            node = self.atom()
        print("unary_expression...>", node)
        return node

    def atom(self):
        """Atom."""
        print("atom....", self.tok, self.lit)
        if self.tok == token.tk_left_parenthesis:  # (
            self.skip(token.tk_left_parenthesis)
            node = self.boolean_expression()
            node = ast.ParenthForm(node)
            self.skip(token.tk_right_parenthesis)
        elif self.tok == token.tk_left_middle_bracket:  # [
            self.skip(token.tk_left_middle_bracket)
            node = self.expression_list()
            node = ast.ListDisplay(node)
            self.skip(token.tk_right_middle_bracket)
        elif self.tok == token.tk_identifier:
            node = ast.Identifier(self.pos, self.tok, self.lit)
            self.next_token()
        elif self.tok == token.kw_self:  # self
            node = ast.Self()
            self.skip(token.kw_self)
        elif self.tok == token.tk_string:
            node = ast.StringLiteral(self.pos, self.tok, self.lit)
            self.next_token()
        elif self.tok == token.tk_floatnumber:
            node = ast.FloatNumber(self.pos, self.tok, self.lit)
            self.next_token()
        elif self.tok == token.tk_integer:
            node = ast.Integer(self.pos, self.tok, self.lit)
            self.next_token()
        else:
            node = None
            self.error('atom unexpected >>', self.pos, self.tok, self.lit, node)
        # Trailing call and attribute-access forms: f(...), obj.attr, chained.
        while self.tok == token.tk_left_parenthesis \
                or self.tok == token.tk_period:
            if self.tok == token.tk_left_parenthesis:
                self.skip(token.tk_left_parenthesis)
                if self.tok == token.tk_right_parenthesis:
                    node2 = None
                else:
                    node2 = self.expression_list()
                node = ast.Call(node, node2)
                self.skip(token.tk_right_parenthesis)
            elif self.tok == token.tk_period:
                self.skip(token.tk_period)
                if self.tok == token.tk_identifier:
                    node = ast.PeriodForm(
                        node, ast.Identifier(self.pos, self.tok, self.lit))
                    self.next_token()
                else:
                    node = None
                    self.error('atom unexpected parse periodform >>',
                               self.pos, self.tok, self.lit, node)
            else:
                self.error("atom unexpected, in while....>>")
        return node
def setUp(self) -> None:
    script = open(os.path.join("sample_lox", "every_symbol.lox"), 'r')
    self.scanner = Scanner(script.read())
    script.close()

    self.expected_tokens = [
        Token(TokenType.FUN, "fun", None, 1),
        Token(TokenType.IDENTIFIER, "funcTest", None, 1),
        Token(TokenType.LEFT_PAREN, "(", None, 1),
        Token(TokenType.IDENTIFIER, "arg", None, 1),
        Token(TokenType.RIGHT_PAREN, ")", None, 1),
        Token(TokenType.LEFT_BRACE, "{", None, 1),
        Token(TokenType.IDENTIFIER, "math", None, 2),
        Token(TokenType.EQUAL, "=", None, 2),
        Token(TokenType.NUMBER, "5", 5, 2),
        Token(TokenType.MINUS, "-", None, 2),
        Token(TokenType.NUMBER, "5", 5, 2),
        Token(TokenType.PLUS, "+", None, 2),
        Token(TokenType.NUMBER, "5", 5, 2),
        Token(TokenType.STAR, "*", None, 2),
        Token(TokenType.NUMBER, "5", 5, 2),
        Token(TokenType.SLASH, "/", None, 2),
        Token(TokenType.NUMBER, "5", 5, 2),
        Token(TokenType.SEMICOLON, ";", None, 2),
        Token(TokenType.CLASS, "class", None, 3),
        Token(TokenType.IDENTIFIER, "c", None, 3),
        Token(TokenType.DOT, ".", None, 3),
        Token(TokenType.IDENTIFIER, "func", None, 3),
        Token(TokenType.NUMBER, "5", 5, 4),
        Token(TokenType.EQUAL, "=", None, 4),
        Token(TokenType.NUMBER, "5", 5, 4),
        Token(TokenType.BANG_EQUAL, "!=", None, 4),
        Token(TokenType.NUMBER, "6", 6, 4),
        Token(TokenType.EQUAL_EQUAL, "==", None, 4),
        Token(TokenType.NUMBER, "6", 6, 4),
        Token(TokenType.GREATER, ">", None, 4),
        Token(TokenType.NUMBER, "4", 4, 4),
        Token(TokenType.GREATER_EQUAL, ">=", None, 4),
        Token(TokenType.NUMBER, "4", 4, 4),
        Token(TokenType.LESS, "<", None, 4),
        Token(TokenType.NUMBER, "10", 10, 4),
        Token(TokenType.LESS_EQUAL, "<=", None, 4),
        Token(TokenType.NUMBER, "10", 10, 4),
        Token(TokenType.STRING, '"Str"', "Str", 5),
        Token(TokenType.AND, "and", None, 6),
        Token(TokenType.ELSE, "else", None, 6),
        Token(TokenType.FALSE, "false", None, 6),
        Token(TokenType.FOR, "for", None, 6),
        Token(TokenType.IF, "if", None, 6),
        Token(TokenType.NIL, "nil", None, 6),
        Token(TokenType.OR, "or", None, 6),
        Token(TokenType.PRINT, "print", None, 6),
        Token(TokenType.SUPER, "super", None, 6),
        Token(TokenType.THIS, "this", None, 6),
        Token(TokenType.TRUE, "true", None, 6),
        Token(TokenType.WHILE, "while", None, 6),
        Token(TokenType.RETURN, "return", None, 8),
        Token(TokenType.RIGHT_BRACE, "}", None, 9),
        Token(TokenType.EOF, "", None, 9),
    ]
class Parser(object):
    def __init__(self, filename, src):
        self.file = token.File(filename)
        self.scanner = Scanner(self.file, src)
        self.pos = None
        self.tok = None
        self.lit = None
        self.next_token()

    def next_token(self):
        """Fetch the next token."""
        print("next_token...")
        self.pos, self.tok, self.lit = self.scanner.scan()
        print('--------------------', self.pos, self.tok, self.lit)

    def error(self, *args):
        print("error....", self.pos, self.tok, self.lit, args)
        exit(1)

    def parse_file(self):
        file = ast.File()
        while self.tok != token.EOF:
            node = self.statement()
            file.append_statements(node)
        if self.tok != token.EOF:
            # A complete expression was parsed but the input did not end.
            self.error("bad end...")
        print("File", file)
        return file

    def statement(self):
        """Statement."""
        if self.tok == token.IDENT:
            if self.lit == token.KW_PRINT:  # print statement
                node = self.print_statement()
                return node
        node = self.expression()
        return node

    def print_statement(self):
        """print statement."""
        self.next_token()
        self.skip(token.LPAREN)
        node = self.param_list()
        self.skip(token.RPAREN)
        return ast.Print(node)

    def param_list(self):
        """Parameter list."""
        node = ast.ParamList()
        node1 = self.relational_expression()
        node.append_param(node1)
        while self.tok == token.COMMA:
            self.skip(token.COMMA)
            node1 = self.relational_expression()
            node.append_param(node1)
        return node

    def expression(self):
        """Expression."""
        print("expression....")
        node = self.assignment_expression()
        print("expression...>", node)
        return node

    def assignment_expression(self):
        """Assignment expression."""
        if self.tok == token.IDENT:
            node = ast.Ident(self.pos, self.tok, self.lit)
            self.next_token()
            if self.tok == token.ASSIGN:
                self.next_token()
                node2 = self.relational_expression()
                return ast.Assign(node, node2)
            else:
                self.error("assignment error..")
        else:
            self.error("assignment error")

    def relational_expression(self):
        """Addition/subtraction expression."""
        print("relational_expression....")
        node = self.multiplicative_expression()
        while self.tok == token.ADD or self.tok == token.SUB:
            tok1 = self.tok
            self.next_token()
            node2 = self.multiplicative_expression()
            if tok1 == token.ADD:
                node = ast.Add(node, node2)
            elif tok1 == token.SUB:
                node = ast.Sub(node, node2)
            else:
                raise Exception("relational_expression unexpected token", tok1)
        print("relational_expression...>", node)
        return node

    def multiplicative_expression(self):
        """Multiplication/division expression."""
        print("multiplicative_expression....")
        node = self.unary_expression()
        while self.tok == token.DIV or self.tok == token.MUL:
            tok1 = self.tok
            self.next_token()
            node2 = self.unary_expression()
            if tok1 == token.DIV:
                node = ast.Div(node, node2)
            elif tok1 == token.MUL:
                node = ast.Mul(node, node2)
            else:
                raise Exception("multiplicative_expression unexpected token", tok1)
        print("multiplicative_expression...>", node)
        return node

    def unary_expression(self):
        """Unary expression."""
        print("unary_expression....")
        node = self.primary_expression()
        print("unary_expression...>", node)
        return node

    def primary_expression(self):
        """Primary expression."""
        print("primary_expression....", self.tok, self.lit)
        if self.tok == token.NUMBER:
            node = ast.Number(self.pos, self.tok, self.lit)
            self.next_token()
            print("primary_expression...>", node)
            return node
        elif self.tok == token.IDENT:
            node = ast.Ident(self.pos, self.tok, self.lit)
            self.next_token()
            print("primary_expression...>", node)
            return node
        elif self.tok == token.LPAREN:
            self.next_token()
            node = self.relational_expression()
            self.skip(token.RPAREN)
            print("primary_expression...>", node)
            return node
        else:
            self.error("bad express...")  # two operators in a row

    def skip(self, tok):
        """Skip over the expected token."""
        if self.tok == tok:
            self.next_token()
        else:
            self.error("bad skip...", self.tok, tok)  # unexpected token
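# Hypothetical smoke test for the Parser above; the file name and source text
# are illustrative, and whether they parse depends on the accompanying
# Scanner/token/ast modules, which are not shown here.
if __name__ == '__main__':
    demo_parser = Parser('<demo>', 'a = 1 + 2 * 3')
    print(demo_parser.parse_file())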