def make_literal_node(self, lf, lit, make_string) -> ast.Literal: """ Produce the literal node which does not contain any literal, but pointer to the stored literal bytes. :param self: :param lf: line, file :param lit: the literal, could be int, float, bool, or str :param make_string: True if make string, False if make char """ # print(lit, type(lit)) if isinstance(lit, bool): # in python, bool is int but int is not bool b = typ.boolean_to_bytes(lit) lit_type = 2 elif isinstance(lit, int): b = typ.int_to_bytes(lit) lit_type = 0 elif isinstance(lit, float): b = typ.float_to_bytes(lit) lit_type = 1 elif isinstance(lit, str): b = typ.string_to_bytes(lit) if make_string: lit_type = 3 else: if len(b) != 1: raise stl.ParseException( "Only ascii characters can be char") lit_type = 4 else: raise stl.ParseException("Unexpected literal type") if lit_type == 2: # special case for boolean, since in python, bool and int has the same hash if lit: return ast.Literal(lf, 1, 2) else: return ast.Literal(lf, 0, 2) elif lit in self.literals: pos = self.literals[lit] node = ast.Literal(lf, pos, lit_type) return node else: ptr = len(self.literal_bytes) self.literals[lit] = ptr self.literal_bytes.extend(b) node = ast.Literal(lf, ptr, lit_type) if lit_type == 3: self.string_lengths[ptr] = len(b) return node
def build_parenthesis(self):
    """
    Close a plain (grouping) parenthesis.

    Recurses into the innermost active builder, collapses its content
    into a block, and pushes the single resulting expression onto this
    builder's stack.

    :raises stl.ParseException: if the parenthesis holds zero or more
        than one expression
    """
    target = self.inner
    if target.inner:
        target.build_parenthesis()
    else:
        target.build_line()
    block = target.get_as_block()
    self.invalidate_inner()
    count = len(block.lines)
    if count == 0:
        raise stl.ParseException("Empty parenthesis")
    if count != 1:
        tail = block.lines[-1]
        raise stl.ParseException(
            "Too many elements in parenthesis, in file '{}', at line {}"
            .format(tail.file, tail.line_num))
    self.stack.append(block.lines[0])
def line_tokenize(self, non_literal, line_num):
    """
    Tokenize a line, with string literals removed.

    :param non_literal: text to be tokenize, no string literal
    :param line_num: the line number
    :return: None
    """
    for segment in normalize(non_literal):
        if segment.isidentifier():
            self.tokens.append(stl.IdToken(line_num, segment))
        elif is_float(segment) or is_integer(segment):
            # both float and integer text become NumToken
            self.tokens.append(stl.NumToken(line_num, segment))
        elif stl.is_in_all(segment):
            self.tokens.append(stl.IdToken(line_num, segment))
        elif segment[:-1] in stl.OP_EQ:
            # an operator-assignment such as "+="
            self.tokens.append(stl.IdToken(line_num, segment))
        elif segment == stl.EOL:
            self.tokens.append(stl.IdToken(line_num, stl.EOL))
        elif segment in stl.OMITS:
            pass
        else:
            raise stl.ParseException(
                "Unknown symbol: '{}', at line {}".format(segment, line_num))
def restore_tokens(self, file: _io.BytesIO):
    """
    Rebuild ``self.tokens`` from a serialized token stream.

    Each record begins with a one-byte flag: 0 terminates the stream
    (an EOF token is appended), otherwise the flag selects the token
    class and is followed by the line number, file name, and token text
    as length-prefixed strings.

    :param file: the binary stream to read tokens from
    :raises stl.ParseException: on an unrecognized flag byte
    """
    # flag byte -> token class
    token_classes = {
        1: stl.NumToken,
        2: stl.LiteralToken,
        3: stl.IdToken,
        4: stl.DocToken,
    }
    self.tokens.clear()
    while True:
        flag = int.from_bytes(file.read(1), "big")
        if flag == 0:
            self.tokens.append(stl.Token((stl.EOF, None)))
            break
        line = int(stl.read_string(file))
        file_name = stl.read_string(file)
        lf = line, file_name
        cls = token_classes.get(flag)
        if cls is None:
            raise stl.ParseException("Unknown flag: {}".format(flag))
        self.tokens.append(cls(lf, stl.read_string(file)))
def parse_expr(lst):
    """
    Reduce a flat list of operand/operator nodes into one expression tree.

    Repeatedly scans the list for the highest-precedence operator whose
    operand slots are still empty, binds its neighbouring list entries
    into it, and removes those entries — until a single root node remains.

    :param lst: mutable list of AST nodes (operands and unfilled operators)
    :return: the root node of the resulting expression tree
    :raises stl.ParseException: if the selected node is not a known
        operator kind (malformed expression)
    """
    # print(lst)
    while len(lst) > 1:
        # Find the highest-precedence operator that has not been bound yet.
        # An operator counts as "unbound" only while all its operand slots
        # are still None.
        max_pre = 0
        index = 0
        for i in range(len(lst)):
            node = lst[i]
            if isinstance(node, UnaryOperator):
                pre = node.precedence()
                if pre > max_pre and node.value is None:
                    max_pre = pre
                    index = i
            elif isinstance(node, BinaryExpr):
                pre = node.precedence()
                if pre > max_pre and node.left is None and node.right is None:
                    max_pre = pre
                    index = i
            elif isinstance(node, TernaryOperator):
                pre = node.precedence()
                if pre > max_pre and node.left is None and node.mid is None and node.right is None:
                    max_pre = pre
                    index = i
            elif isinstance(node, InDecrementOperator):
                pre = node.precedence()
                if pre > max_pre and node.value is None:
                    max_pre = pre
                    index = i
        # Bind the chosen operator's operands and collapse them out of lst.
        operator = lst[index]
        if isinstance(operator, UnaryOperator):
            # prefix unary: consume the node to the right
            operator.value = lst[index + 1]
            lst.pop(index + 1)
        elif isinstance(operator, BinaryExpr):
            # consume neighbours on both sides (right first so the left
            # index stays valid)
            operator.left = lst[index - 1]
            operator.right = lst[index + 1]
            lst.pop(index + 1)
            lst.pop(index - 1)
        elif isinstance(operator, TernaryOperator):
            operator.left = lst[index - 1]
            operator.mid = lst[index + 1]
            operator.right = lst[index + 2]
            lst.pop(index + 1)
            lst.pop(index + 1)
            lst.pop(index - 1)
        elif isinstance(operator, InDecrementOperator):
            # ++/--: treat as postfix unless a name/dot follows, in which
            # case it is prefix
            is_post = True
            if index < len(lst) - 1:
                node = lst[index + 1]
                if isinstance(node, NameNode) or isinstance(node, Dot):
                    is_post = False
            operator.is_post = is_post
            if is_post:
                operator.value = lst[index - 1]
                lst.pop(index - 1)
            else:
                operator.value = lst[index + 1]
                lst.pop(index + 1)
        else:
            # index defaults to 0 when no unbound operator was found, so a
            # plain operand lands here and reports the malformed expression
            raise stl.ParseException("Unknown error while parsing operators")
    return lst[0]
def get_last(self):
    """
    Return the most recently pushed node.

    Delegates to the innermost active builder when one exists.

    :return: the top node of the active stack
    :raises stl.ParseException: if the stack is empty
    """
    if self.inner:
        return self.inner.get_last()
    if not self.stack:
        raise stl.ParseException("Nothing before this")
    return self.stack[-1]
def get_number(line, v: str):
    """
    Convert numeric token text into an int or a float.

    :param line: the line number, used for error reporting
    :param v: the numeric text; a "." makes it a float, otherwise an int
    :return: the parsed int or float
    :raises stl.ParseException: if v is not valid numeric text
    """
    try:
        if "." in v:
            return float(v)
        else:
            return int(v)
    except (TypeError, ValueError):
        # int()/float() raise ValueError for malformed text (and TypeError
        # for non-string input); the original caught only TypeError, so a
        # bad token like "1..2" escaped as a raw ValueError instead of a
        # ParseException
        raise stl.ParseException(
            "Unexpected syntax: '{}', at line {}".format(v, line))
def build_parenthesis(self):
    """
    Close a plain (grouping) parenthesis: collapse the inner builder's
    content into a block and push its single expression onto the stack.

    :raises stl.ParseException: if the parenthesis holds zero or more
        than one expression
    """
    if self.inner.inner:
        self.inner.build_parenthesis()
    else:
        self.inner.build_line()
    block = self.inner.get_as_block()
    self.invalidate_inner()
    if len(block.lines) != 1:
        # Previously both failure modes reported "Empty parenthesis";
        # distinguish them so "(a; b)" is not reported as empty.
        if len(block.lines) == 0:
            raise stl.ParseException("Empty parenthesis")
        else:
            raise stl.ParseException(
                "Too many elements in parenthesis, in file '{}', at line {}"
                .format(block.lines[-1].file, block.lines[-1].line_num))
    self.stack.append(block.lines[0])
def add_bool(self, line, v: str):
    """
    Push a boolean literal onto the stack.

    :param line: (line number, file name) of the token
    :param v: the literal text, "true" or "false"
    :raises stl.ParseException: for any other text
    """
    if self.inner:
        self.inner.add_bool(line, v)
        return
    if v == "true":
        value = True
    elif v == "false":
        value = False
    else:
        raise stl.ParseException("Unknown boolean value.")
    self.stack.append(value)
def try_build_func(self):
    """
    If the two top stack entries are a function definition followed by a
    block, attach the block as the function's body and pop it.

    Does nothing when the stack shape does not match.

    :raises stl.ParseException: if the function is abstract (an abstract
        function may not have a body)
    """
    if self.inner:
        self.inner.try_build_func()
        return
    if len(self.stack) < 2:
        return
    candidate = self.stack[-2]
    body_candidate = self.stack[-1]
    if not isinstance(candidate, DefStmt):
        return
    if not isinstance(body_candidate, BlockStmt):
        return
    if candidate.abstract:
        raise stl.ParseException("Abstract function cannot have body")
    candidate.body = body_candidate
    self.stack.pop()
def get_as_block(self) -> BlockStmt:
    """
    Return the finished block of statements.

    :return: the accumulated BlockStmt
    :raises stl.ParseException: if the current line was not terminated
        (nodes remain on the stack or an expression is still open)
    """
    unfinished = len(self.stack) > 0 or self.in_expr
    if unfinished:
        raise stl.ParseException("Line is not terminated")
    return self.elements
def build_line(self):
    """
    Terminate the current logical line: drain the stack, folding the
    popped nodes into a single statement, and append that statement to
    ``self.elements``.

    Delegates to the innermost active builder when one exists. The stack
    is consumed top-down; ``lst[0]`` always holds the statement assembled
    so far, and each popped node is merged into it according to its kind
    (operand, unfilled operator, block, or control-flow statement).

    :return: None
    """
    if self.inner:
        self.inner.build_line()
    else:
        self.build_expr()
        if len(self.stack) > 0:
            lst = [self.stack.pop()]
            while len(self.stack) > 0:
                node = self.stack.pop()
                if isinstance(node, LeafNode):
                    # a bare leaf replaces whatever is being assembled
                    lst.__setitem__(
                        0, node) if len(lst) > 0 else lst.append(node)
                elif isinstance(
                        node,
                        UnaryExpr) and len(lst) > 0 and node.value is None:
                    # The build-expr method was interrupted by something;
                    # fill the pending unary operand now
                    node.value = lst[0]
                    lst[0] = node
                elif isinstance(node, BinaryExpr
                                ) and len(lst) > 0 and node.right is None:
                    # fill the pending right-hand side of a binary operator
                    node.right = lst[0]
                    lst[0] = node
                elif isinstance(node, BlockStmt):
                    # keep the block in front of the assembled statement
                    if len(lst) > 0:
                        lst.insert(0, node)
                    else:
                        lst.append(node)
                elif isinstance(node, IfStmt):
                    # exactly one pending node becomes the then-branch
                    if len(lst) == 1:
                        node.then_block = lst[0]
                    elif len(lst) != 0:
                        raise stl.ParseException(
                            "Unexpected token, in file '{}', at line {}".
                            format(node.file, node.line_num))
                    lst.clear()
                    lst.append(node)
                elif isinstance(node, WhileStmt) or isinstance(
                        node, ForLoopStmt):
                    # exactly one pending node becomes the loop body
                    # NOTE(review): when lst is empty the loop node is
                    # dropped rather than appended — confirm this is the
                    # intended behaviour (the IfStmt branch above appends)
                    if len(lst) == 1:
                        node.body = lst.pop()
                        lst.append(node)
                    elif len(lst) != 0:
                        raise stl.ParseException("Unexpected token")
                else:
                    # any other node replaces the statement being assembled
                    lst.__setitem__(
                        0, node) if len(lst) > 0 else lst.append(node)
            # try attaching the finished statement as an else-branch of
            # the previously added line; only add it as a new line if
            # place_else declined
            if len(self.elements.lines) > 0:
                last = self.elements.lines[-1]
                if place_else(last, lst[0]):
                    return
            self.elements.add_line(lst[0])
def parse(self):
    """
    Parses the list of tokens stored in this Lexer, and returns the root node
    of the parsed abstract syntax tree.

    This is the "fn"/"struct" syntax variant of the parser: a single
    dispatch loop over the token list that drives an
    ``ast.AbstractSyntaxTree`` builder. Bracket nesting is tracked with
    counters plus per-kind "nest lists" recording the paren depth at which
    each construct (call, parameter list, condition, list literal) opened.

    :return: the parsed block
    :raises stl.ParseException: on any malformed token sequence
    :raises stl.ParseEOFException: if brackets/braces are unbalanced at EOF
    """
    parser = ast.AbstractSyntaxTree()
    i = 0
    func_count = 0
    par_count = 0  # count of parenthesis
    square_count = 0  # count of square bracket
    cond_nest_list = []
    call_nest_list = []
    param_nest_list = []
    square_nest_list = []
    is_abstract = False
    is_extending = False
    is_conditional = False
    is_type_param = False
    var_level = ast.ASSIGN
    brace_count = 0
    struct_braces = []
    while True:
        try:
            token = self.tokens[i]
            line = (token.line_number(), token.file_name())
            if isinstance(token, stl.IdToken):
                sym = token.symbol
                if sym == "if":
                    parser.add_if(line)
                    is_conditional = True
                elif sym == "else":
                    parser.add_else()
                elif sym == "while":
                    parser.add_while(line)
                    is_conditional = True
                elif sym == "for":
                    parser.add_for_loop(line)
                    is_conditional = True
                elif sym == "return":
                    parser.add_return(line)
                elif sym == "break":
                    parser.add_break(line)
                elif sym == "continue":
                    parser.add_continue(line)
                elif sym == "true" or sym == "false":
                    # booleans are interned like any other literal
                    lit_node = self.make_literal_node(
                        line, True if sym == "true" else False, False)
                    parser.add_bool(line, lit_node)
                elif sym == "null":
                    parser.add_null(line)
                elif sym == "const":
                    var_level = ast.CONST
                elif sym == "var":
                    var_level = ast.VAR
                elif sym == "{":
                    # "{" may open a condition body, an extends body, a
                    # typed function body, a plain block, or a dict literal
                    brace_count += 1
                    if is_conditional:
                        is_conditional = False
                        parser.build_expr()
                        parser.build_condition()
                        parser.new_block()
                    elif is_extending:
                        is_extending = False
                        parser.build_extends()
                        parser.new_block()
                    elif is_type_param:
                        # the pending type annotation becomes the
                        # function's return type
                        parser.build_type_param()
                        # NOTE(review): this local shadows the module-level
                        # "typ" used elsewhere in this file — consider renaming
                        typ = parser.pop_last()
                        func_node: ast.DefStmt = parser.get_last()
                        func_node.r_type = typ
                        parser.new_block()
                        is_type_param = False
                    else:
                        last_token = self.tokens[i - 1]
                        if isinstance(last_token, stl.IdToken) and \
                                (last_token.symbol == ")" or  # is a function
                                 is_identifier_before_block(last_token.symbol)):  # is a class or a dotted import
                            parser.new_block()
                        else:
                            parser.add_dict()
                elif sym == "}":
                    brace_count -= 1
                    parser.build_block()
                    parser.try_build_func()
                    if is_this_list(struct_braces, brace_count):
                        parser.build_struct()
                        struct_braces.pop()
                    # suppress build_line when the next token continues
                    # the statement (e.g. "else")
                    next_token = self.tokens[i + 1]
                    if not (isinstance(next_token, stl.IdToken)
                            and next_token.symbol in stl.NO_BUILD_LINE):
                        parser.build_line()
                elif sym == "(":
                    if i > 0 and is_call(self.tokens[i - 1]):
                        parser.add_call(line)
                        call_nest_list.append(par_count)
                    else:
                        parser.add_parenthesis()
                    par_count += 1
                elif sym == ")":
                    # decide what this ")" closes by matching the depth
                    # recorded when the construct opened
                    par_count -= 1
                    next_sig_token = self.find_next_significant_token(i)
                    if is_this_list(call_nest_list, par_count):
                        parser.build_line()
                        parser.build_call()
                        call_nest_list.pop()
                    elif is_this_list(param_nest_list, par_count):
                        # end of a function's parameter list; a return
                        # type annotation may follow before "{"
                        parser.build_func_params()
                        param_nest_list.pop()
                        parser.begin_type_param(line)
                        is_type_param = True
                    elif next_sig_token.is_identifier(
                    ) and next_sig_token.symbol == "->":
                        parser.build_lambda_parameters()
                    else:
                        parser.build_parenthesis()
                elif sym == "[":
                    if i > 0 and is_call(self.tokens[i - 1]):
                        parser.add_getitem(line)
                    else:
                        # NOTE(review): leftover debug output — list-literal
                        # handling appears unimplemented in this variant
                        print(123123)
                    square_count += 1
                elif sym == "]":
                    square_count -= 1
                    if is_this_list(square_nest_list, square_count):
                        # end of list creation
                        square_nest_list.pop()
                        parser.build_call()
                    else:
                        parser.build_getitem()
                elif sym == "=":
                    parser.add_assignment(line, var_level)
                    var_level = ast.ASSIGN
                elif sym == ":=":
                    # declaration-assignment shorthand
                    parser.build_expr()
                    parser.add_assignment(line, ast.VAR)
                elif sym == ",":
                    if var_level == ast.ASSIGN:
                        # the normal level
                        parser.build_line()
                elif sym == ".":
                    parser.add_dot(line)
                elif sym == "~":
                    # a special mark
                    pass
                elif sym == "fn":
                    # function definition: named ("fn name(") or
                    # anonymous ("fn(")
                    func_doc = self.get_doc(i)
                    i += 1
                    f_token: stl.IdToken = self.tokens[i]
                    f_name = f_token.symbol
                    push_back = 1
                    if f_name == "(":
                        # anonymous: do not skip the "(" token
                        func_count += 1
                        push_back = 0
                    elif f_name.isidentifier():
                        parser.add_name(line, f_name)
                        parser.add_assignment(line, ast.FUNC_DEFINE)
                    else:
                        raise stl.ParseException(
                            "Illegal function name '{}', in file '{}', at line {}"
                            .format(f_name, line[1], line[0]))
                    parser.add_function(line, is_abstract, func_doc)
                    i += push_back
                    param_nest_list.append(par_count)
                    par_count += 1
                    is_abstract = False
                elif sym == "struct":
                    i += 1
                    name_token: stl.IdToken = self.tokens[i]
                    parser.add_struct(line, name_token.symbol)
                    struct_braces.append(brace_count)
                # (legacy handlers removed from this variant are kept
                # disabled in version history: "@" annotations, "->"
                # lambda, "<-" anonymous class, "operator", "class",
                # "extends", "abstract", "new", "throw", "try"/"catch"/
                # "finally", and "import")
                elif sym == "assert":
                    parser.add_unary(line, "assert")
                elif sym == "++" or sym == "--":
                    parser.add_increment_decrement(line, sym)
                elif sym in stl.TERNARY_OPERATORS and \
                        (parser.is_in_ternary() or stl.TERNARY_OPERATORS[sym]):
                    # This check should go strictly before the check of binary ops
                    if parser.is_in_ternary():
                        parser.finish_ternary(line, sym)
                    else:
                        parser.add_ternary(line, sym)
                elif sym in stl.BINARY_OPERATORS:
                    # several binary symbols double as unary when they
                    # start an expression
                    if sym == "-" and (i == 0
                                       or is_unary(self.tokens[i - 1])):
                        parser.add_unary(line, "neg")
                    elif sym == "*" and (i == 0 or is_unary(
                            self.tokens[i - 1]) or is_type_param):
                        parser.add_unary(line, "unpack")
                    elif sym == "&" and (i == 0
                                         or is_unary(self.tokens[i - 1])):
                        # the command-line notation
                        parser.add_unary(line, "pack")
                    elif sym == ":":
                        parser.add_type_node(line)
                    else:
                        parser.add_operator(line, sym)
                elif sym in stl.UNARY_OPERATORS:
                    if sym == "!":
                        parser.add_unary(line, "!")
                    else:
                        parser.add_unary(line, sym)
                elif sym[:-1] in stl.OP_EQ:
                    # operator-assignment such as "+="
                    parser.add_operator(line, sym, True)
                elif token.is_eol():
                    if var_level != ast.ASSIGN:
                        # "var x" / "const x" with no initializer:
                        # declare with an undefined value
                        parser.add_assignment(line, var_level)
                        parser.add_undefined(line)
                        parser.build_line()
                        var_level = ast.ASSIGN
                    parser.build_line()
                else:
                    # any other identifier is a plain name
                    parser.add_name(line, sym)
            elif isinstance(token, stl.NumToken):
                value = token.value
                num = get_number(line, value)
                num_lit = self.make_literal_node(line, num, False)
                parser.add_number(line, num_lit)
            elif isinstance(token, stl.LiteralToken):
                value = token.text
                lit = self.make_literal_node(line, value, token.is_string)
                if token.is_string:
                    str_len = self.string_lengths[lit.lit_pos]
                    parser.add_string_literal(line, lit, str_len)
                else:
                    parser.add_char(line, lit)
            elif isinstance(token, stl.DocToken):
                pass
            elif token.is_eof():
                parser.build_line()
                break
            else:
                stl.unexpected_token(token)
            i += 1
        except stl.ParseException as e:
            # preserve parse errors as-is
            raise e
        except Exception:
            # wrap anything else with the token position for context
            raise stl.ParseException(
                "Parse error in '{}', at line {}".format(
                    self.tokens[i].file_name(), self.tokens[i].line_number()))
    # every bracket kind must be balanced at EOF
    # (len(param_nest_list) relies on truthiness; equivalent to != 0)
    if par_count != 0 or len(call_nest_list) != 0 or len(cond_nest_list) != 0 or len(param_nest_list) or \
            len(square_nest_list) != 0 or brace_count != 0:
        raise stl.ParseEOFException(
            "Reach the end while parsing, {},{},{},{}".format(
                par_count, call_nest_list, cond_nest_list, param_nest_list))
    return parser.get_as_block()
def find_import(self, from_, to):
    """
    Looks for import statement between the given slice of the tokens list.

    On finding one, resolves the target file (user library "*.sp" path or
    system library under ``spl_path/lib``), removes the consumed tokens
    from the list in place, imports the file, and — for
    ``import namespace`` — re-appends tokens that expose the imported
    name. Only the first import found is processed (the loop breaks).

    :param from_: the beginning position of search
    :param to: the end position of search
    :return: None
    :raises stl.ParseException: on a malformed import statement
    """
    for i in range(from_, to, 1):
        token = self.tokens[i]
        if isinstance(token, stl.IdToken) and token.symbol == "import":
            next_token: stl.Token = self.tokens[i + 1]
            namespace_token = None
            if isinstance(
                    next_token,
                    stl.IdToken) and next_token.symbol == "namespace":
                # "import namespace <path>": drop the namespace keyword
                # now, remember it for re-insertion below
                namespace_token = next_token
                self.tokens.pop(i + 1)
                path_token: stl.LiteralToken = self.tokens[i + 1]
            elif isinstance(next_token, stl.LiteralToken):
                path_token: stl.LiteralToken = self.tokens[i + 1]
            else:
                raise stl.ParseException(
                    "Unexpected token in file '{}', at line {}".format(
                        next_token.file, next_token.line))
            name = path_token.text
            if name[-3:] == ".sp":
                # user lib: resolve relative to the script directory
                if len(self.script_dir) == 0:
                    file_name = name[:-3] + ".sp"
                else:
                    file_name = self.script_dir + "{}{}".format(
                        "/", name[:-3]) + ".sp"
                # default import name is the file's base name
                if "/" in name:
                    import_name = name[name.rfind("/") + 1:-3]
                else:
                    import_name = name[:-3]
            else:
                # system lib: resolve under <spl_path>/lib/<name>.sp
                file_name = "{}{}lib{}{}.sp".format(
                    self.spl_path, os.sep, os.sep, name)
                import_name = name
            if len(self.tokens) > i + 2:
                # optional "as <alias>" suffix
                as_token: stl.IdToken = self.tokens[i + 2]
                if as_token.symbol == "as":
                    if namespace_token is not None:
                        raise stl.ParseException(
                            "Unexpected combination 'import namespace ... as ...'"
                        )
                    name_token: stl.IdToken = self.tokens[i + 3]
                    import_name = name_token.symbol
                    # drop the "as" and alias tokens
                    self.tokens.pop(i + 1)
                    self.tokens.pop(i + 1)
            self.tokens.pop(i + 1)  # remove the import name token
            self.import_file(file_name, import_name)
            if namespace_token:
                # re-expose the namespace import as trailing tokens:
                # "namespace <import_name> ;"
                lf = namespace_token.line, namespace_token.file
                self.tokens.append(namespace_token)
                self.tokens.append(stl.IdToken(lf, import_name))
                self.tokens.append(stl.IdToken(lf, stl.EOL))
            break
def parse(self):
    """
    Parses the list of tokens stored in this Lexer, and returns the root node
    of the parsed abstract syntax tree.

    This is the "function"/"def"/"class" syntax variant of the parser:
    a single dispatch loop over the token list driving an
    ``ast.AbstractSyntaxTree`` builder. Bracket nesting is tracked with
    counters plus per-kind "nest lists" recording the paren/brace depth
    at which each construct (call, parameter list, condition, list
    literal, class, import) opened.

    :return: the parsed block
    :raises stl.ParseException: on any malformed token sequence
    :raises stl.ParseEOFException: if brackets/braces are unbalanced at EOF
    """
    parser = ast.AbstractSyntaxTree()
    i = 0
    func_count = 0
    par_count = 0  # count of parenthesis
    square_count = 0  # count of square bracket
    cond_nest_list = []
    call_nest_list = []
    param_nest_list = []
    square_nest_list = []
    is_abstract = False
    is_extending = False
    var_level = ast.ASSIGN
    brace_count = 0
    class_braces = []
    import_braces = []
    while True:
        try:
            token = self.tokens[i]
            line = (token.line_number(), token.file_name())
            if isinstance(token, stl.IdToken):
                sym = token.symbol
                if sym == "if":
                    # conditions are parenthesised in this variant: the
                    # extra i += 1 skips the "(" after the keyword
                    cond_nest_list.append(par_count)
                    par_count += 1
                    parser.add_if(line)
                    i += 1
                elif sym == "else":
                    parser.add_else()
                elif sym == "while":
                    cond_nest_list.append(par_count)
                    par_count += 1
                    parser.add_while(line)
                    i += 1
                elif sym == "for":
                    cond_nest_list.append(par_count)
                    par_count += 1
                    parser.add_for_loop(line)
                    i += 1
                elif sym == "return":
                    parser.add_unary(line, "return")
                elif sym == "break":
                    parser.add_break(line)
                elif sym == "continue":
                    parser.add_continue(line)
                elif sym == "true" or sym == "false":
                    parser.add_bool(line, sym)
                elif sym == "null":
                    parser.add_null(line)
                elif sym == "const":
                    var_level = ast.CONST
                elif sym == "var":
                    var_level = ast.VAR
                elif sym == "@":
                    # annotation: consume the following identifier
                    i += 1
                    next_token: stl.IdToken = self.tokens[i]
                    parser.add_annotation(line, next_token.symbol)
                elif sym == "{":
                    brace_count += 1
                    if is_extending:
                        is_extending = False
                        parser.build_extends()
                    last_token = self.tokens[i - 1]
                    if isinstance(last_token, stl.IdToken) and \
                            (last_token.symbol == ")" or  # is a function
                             is_identifier_before_block(last_token.symbol)):  # is a class or a dotted import
                        parser.new_block()
                    else:
                        parser.add_dict()
                elif sym == "}":
                    brace_count -= 1
                    parser.build_block()
                    parser.try_build_func()
                    if is_this_list(import_braces, brace_count):
                        parser.build_import()
                        import_braces.pop()
                    elif is_this_list(class_braces, brace_count):
                        parser.build_class()
                        class_braces.pop()
                    # suppress build_line when the next token continues
                    # the statement (e.g. "else")
                    next_token = self.tokens[i + 1]
                    if not (isinstance(next_token, stl.IdToken)
                            and next_token.symbol in stl.NO_BUILD_LINE):
                        parser.build_line()
                elif sym == "(":
                    if i > 0 and is_call(self.tokens[i - 1]):
                        parser.add_call(line)
                        call_nest_list.append(par_count)
                    else:
                        parser.add_parenthesis()
                    par_count += 1
                elif sym == ")":
                    # decide what this ")" closes by matching the depth
                    # recorded when the construct opened
                    par_count -= 1
                    if is_this_list(call_nest_list, par_count):
                        parser.build_line()
                        parser.build_call()
                        call_nest_list.pop()
                    elif is_this_list(param_nest_list, par_count):
                        parser.build_func_params()
                        param_nest_list.pop()
                    elif is_this_list(cond_nest_list, par_count):
                        parser.build_expr()
                        parser.build_condition()
                        cond_nest_list.pop()
                    else:
                        parser.build_parenthesis()
                elif sym == "[":
                    if i > 0 and is_call(self.tokens[i - 1]):
                        parser.add_getitem(line)
                    else:
                        # list literal: desugar to a call of "list" (or
                        # "array" when preceded by the "~" mark)
                        square_nest_list.append(square_count)
                        func_name = "list"
                        if i > 0:
                            last_token = self.tokens[i - 1]
                            if isinstance(last_token, stl.IdToken
                                          ) and last_token.symbol == "~":
                                func_name = "array"
                        parser.add_name(line, func_name)
                        parser.add_call(line)
                    square_count += 1
                elif sym == "]":
                    square_count -= 1
                    if is_this_list(square_nest_list, square_count):
                        # end of list creation
                        square_nest_list.pop()
                        parser.build_call()
                    else:
                        parser.build_getitem()
                elif sym == "=":
                    parser.build_expr()
                    parser.add_assignment(line, var_level)
                    var_level = ast.ASSIGN
                elif sym == ",":
                    if var_level == ast.ASSIGN:
                        # the normal level
                        parser.build_line()
                elif sym == ".":
                    parser.add_dot(line)
                elif sym == "~":
                    # a special mark
                    pass
                elif sym == "function" or sym == "def":
                    # function definition: named or anonymous
                    func_doc = self.get_doc(i)
                    i += 1
                    f_token: stl.IdToken = self.tokens[i]
                    f_name = f_token.symbol
                    push_back = 1
                    if f_name == "(":
                        # anonymous: do not skip the "(" token
                        func_count += 1
                        push_back = 0
                    elif f_name.isidentifier():
                        parser.add_name(line, f_name)
                        parser.add_assignment(line, ast.FUNC_DEFINE)
                    else:
                        raise stl.ParseException(
                            "Illegal function name '{}'".format(f_name))
                    parser.add_function(line, is_abstract, func_doc)
                    i += push_back
                    param_nest_list.append(par_count)
                    par_count += 1
                    is_abstract = False
                elif sym == "operator":
                    # operator overload: defines a dunder-named function
                    func_doc = self.get_doc(i)
                    i += 1
                    op_token: stl.IdToken = self.tokens[i]
                    op_name = "__" + stl.BINARY_OPERATORS[
                        op_token.symbol] + "__"
                    parser.add_name(line, op_name)
                    parser.add_assignment(line, ast.FUNC_DEFINE)
                    parser.add_function(line, False, func_doc)
                    param_nest_list.append(par_count)
                    par_count += 1
                    i += 1
                elif sym == "class":
                    class_doc = self.get_doc(i)
                    i += 1
                    c_token: stl.IdToken = self.tokens[i]
                    class_name = c_token.symbol
                    if class_name in stl.NO_CLASS_NAME:
                        raise stl.ParseException(
                            "Name '{}' is forbidden for class name".format(
                                class_name))
                    parser.add_class(
                        (c_token.line_number(), c_token.file_name()),
                        class_name, is_abstract, class_doc)
                    class_braces.append(brace_count)
                    is_abstract = False
                elif sym == "extends":
                    parser.add_extends()
                    is_extending = True
                elif sym == "abstract":
                    # only valid immediately before a definition keyword
                    next_token = self.tokens[i + 1]
                    if isinstance(
                            next_token, stl.IdToken
                    ) and next_token.symbol in ABSTRACT_IDENTIFIER:
                        is_abstract = True
                    else:
                        raise stl.ParseException(
                            "Unexpected token 'abstract', in file '{}', at line {}"
                            .format(line[1], line[0]))
                elif sym == "new":
                    parser.add_unary(line, "new")
                elif sym == "throw":
                    parser.add_unary(line, "throw")
                elif sym == "try":
                    parser.add_try(line)
                elif sym == "catch":
                    # "catch (" — skip the "(" and track its depth like a
                    # condition
                    parser.add_catch(line)
                    i += 1
                    cond_nest_list.append(par_count)
                    par_count += 1
                elif sym == "finally":
                    parser.add_finally(line)
                elif sym == "assert":
                    parser.add_unary(line, "assert")
                elif sym == "namespace":
                    parser.add_unary(line, "namespace")
                elif sym == "++" or sym == "--":
                    parser.add_increment_decrement(line, sym)
                elif sym in stl.TERNARY_OPERATORS and \
                        (parser.is_in_ternary() or stl.TERNARY_OPERATORS[sym]):
                    # This check should go strictly before the check of binary ops
                    if parser.is_in_ternary():
                        parser.finish_ternary(line, sym)
                    else:
                        parser.add_ternary(line, sym)
                elif sym in stl.BINARY_OPERATORS:
                    # "-" and "*" double as unary when they start an
                    # expression; "**" in unary position is kw-unpack
                    if sym == "-" and (i == 0
                                       or is_unary(self.tokens[i - 1])):
                        parser.add_unary(line, "neg")
                    elif sym == "*" and (i == 0
                                         or is_unary(self.tokens[i - 1])):
                        next_token = self.tokens[i + 1]
                        if isinstance(
                                next_token,
                                stl.IdToken) and next_token.symbol == "*":
                            parser.add_unary(line, "kw_unpack")
                            i += 1
                        else:
                            parser.add_unary(line, "unpack")
                    else:
                        parser.add_operator(line, sym)
                elif sym in stl.UNARY_OPERATORS:
                    if sym == "!" or sym == "not":
                        parser.add_unary(line, "!")
                    else:
                        # NOTE(review): leftover debug output — an
                        # unhandled unary operator is silently printed
                        print("Should not be here")
                elif sym[:-1] in stl.OP_EQ:
                    # operator-assignment such as "+="
                    parser.add_operator(line, sym, True)
                elif sym == "import":
                    # consume "<name> <path>" following the keyword
                    i += 2
                    name_token: stl.IdToken = self.tokens[i - 1]
                    path_token: stl.IdToken = self.tokens[i]
                    import_name = name_token.symbol
                    parser.add_import(line, import_name, path_token.symbol)
                    import_braces.append(brace_count)
                elif token.is_eol():
                    if var_level != ast.ASSIGN:
                        # "var x" / "const x" with no initializer:
                        # declare with an undefined value
                        active = parser.get_active()
                        # NOTE(review): this copy/clear/refill round-trip
                        # leaves active.stack unchanged — confirm whether
                        # it is dead code or intentional
                        und_vars = active.stack.copy()
                        active.stack.clear()
                        for node in und_vars:
                            active.stack.append(node)
                        parser.add_assignment(line, var_level)
                        parser.add_undefined(line)
                        parser.build_line()
                        var_level = ast.ASSIGN
                    parser.build_line()
                else:
                    # any other identifier is a plain name
                    parser.add_name(line, sym)
            elif isinstance(token, stl.NumToken):
                value = token.value
                parser.add_number(line, value)
            elif isinstance(token, stl.LiteralToken):
                value = token.text
                parser.add_literal(line, value)
            elif isinstance(token, stl.DocToken):
                pass
            elif token.is_eof():
                parser.build_line()
                break
            else:
                stl.unexpected_token(token)
            i += 1
        except stl.ParseException as e:
            # preserve parse errors as-is
            raise e
        except Exception:
            # wrap anything else with the token position for context
            raise stl.ParseException(
                "Parse error in '{}', at line {}".format(
                    self.tokens[i].file_name(), self.tokens[i].line_number()))
    # every bracket kind must be balanced at EOF
    # (len(param_nest_list) relies on truthiness; equivalent to != 0)
    if par_count != 0 or len(call_nest_list) != 0 or len(cond_nest_list) != 0 or len(param_nest_list) or \
            len(square_nest_list) != 0 or brace_count != 0:
        raise stl.ParseEOFException(
            "Reach the end while parsing, {},{},{},{}".format(
                par_count, call_nest_list, cond_nest_list,
                param_nest_list))
    return parser.get_as_block()