def error_expected_token_type(self, token_types):
    if not isinstance(token_types, (tuple, list)):
        token_types = (token_types, )
    generate_error(
        'Parser', 'expected token {}, got {}'.format(
            ' or '.join(str(token) for token in token_types),
            self.token.type), self.token.line, self.token.column)

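# A minimal usage sketch, assuming error_expected_token_type lives on a
# recursive-descent parser that tracks the current token in self.token and
# pulls new tokens from self.lexer.next_token(). The expect() helper below
# is hypothetical and only illustrates how the error reporter might be called.
def expect(self, token_type):
    # Report a parse error if the current token is not of the expected type,
    # then consume it and advance to the next token.
    if self.token.type != token_type:
        self.error_expected_token_type(token_type)
    consumed = self.token
    self.token = self.lexer.next_token()
    return consumed
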
def invalid_type_error(token: Token, type_: str,
                       expected_type: Union[str, List, Tuple]):
    if isinstance(expected_type, (list, tuple)):
        expected_type = ' or '.join(expected_type)
    generate_error(
        'Semantic',
        'Invalid type, expected {}, got {}'.format(expected_type, type_),
        token.line, token.column)

def next_token(self):
    """Return the next significant token, synthesizing DEDENT tokens from
    the indentation stack and skipping blanks, comments, line continuations
    and error tokens after reporting them."""
    while True:
        # handle dedents on stack
        if self.dedent_count:
            self.dedent_count -= 1
            self.token = Token(Token.DEDENT, self.indent_stack.pop(),
                               self.line_num,
                               self.mo.start() - self.line_start + 1)
            return self.token
        type_ = self.mo.lastgroup
        value = self.mo.group(type_)
        # handle blank after line continuation
        if self.token.type == Token.LINE_CONTINUATION and type_ == Token.INDENT:
            type_ = Token.BLANK
        # handle new dedents
        if self.token.type == Token.NEWLINE and type_ not in (
                Token.INDENT, Token.BLANK, Token.NEWLINE):
            if self.indent_stack:
                self.dedent_count = len(self.indent_stack)
                continue
        # handle new indents and dedents
        if type_ == Token.INDENT:
            if not self.indent_stack:
                self.indent_stack.append(value)
            else:
                last_indent = self.indent_stack[-1]
                if value == last_indent:
                    type_ = Token.BLANK
                elif value.startswith(last_indent):
                    self.indent_stack.append(value)
                else:
                    if last_indent.startswith(value):
                        for idx, indent in enumerate(self.indent_stack):
                            if value == indent:
                                self.dedent_count = len(
                                    self.indent_stack) - idx - 1
                                break
                    if self.dedent_count:
                        continue
                    else:
                        type_ = Token.INDENTATION_ERROR
        self.token = Token(getattr(Token, type_), value, self.line_num,
                           self.mo.start() - self.line_start + 1)
        if type_ in (Token.NEWLINE, Token.LINE_CONTINUATION):
            self.line_start = self.mo.end()
            self.line_num += 1
        if type_ == Token.COMMENT and '\n' in value:
            self.line_start = self.mo.start() + value.rfind('\n') + 1
            self.line_num += value.count('\n')
        if type_ == Token.UNDEFINED_TOKEN:
            generate_error(
                'Lexer', "Undefined token {}".format(repr(self.token.value)),
                self.token.line, self.token.column)
        if type_ == Token.INDENTATION_ERROR:
            generate_error('Lexer', 'Indentation error', self.token.line,
                           self.token.column)
        self.mo = self.rg.match(self.text, self.mo.end())
        if type_ not in (Token.BLANK, Token.COMMENT, Token.LINE_CONTINUATION,
                         Token.UNDEFINED_TOKEN, Token.INDENTATION_ERROR):
            # handle newlines at the beginning of the file or after
            # tokens that were not returned
            if type_ == Token.NEWLINE and self.newline_was_returned:
                continue
            elif type_ == Token.NEWLINE:
                self.newline_was_returned = True
            else:
                self.newline_was_returned = False
            return self.token

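# Illustrative sketch of the indentation bookkeeping performed by next_token:
# a strictly deeper indentation pushes one INDENT, returning to a shallower
# level that is already on the stack pops one DEDENT per abandoned level, and
# indentation that matches no open level is an error. classify_indent is a
# hypothetical helper written only to demonstrate the algorithm in isolation.
def classify_indent(indent_stack, value):
    # First indentation seen, or a deeper level than the current block.
    if not indent_stack or (value != indent_stack[-1]
                            and value.startswith(indent_stack[-1])):
        indent_stack.append(value)
        return 'INDENT'
    # Same indentation as the current block: not a block change.
    if value == indent_stack[-1]:
        return 'BLANK'
    # Back to a previously opened level: close every deeper block.
    if value in indent_stack:
        idx = indent_stack.index(value)
        dedents = len(indent_stack) - idx - 1
        del indent_stack[idx + 1:]
        return 'DEDENT x {}'.format(dedents)
    # Indentation that matches no open block is an indentation error.
    return 'INDENTATION_ERROR'

# Example sequence:
# stack = []
# classify_indent(stack, '    ')      -> 'INDENT'
# classify_indent(stack, '        ')  -> 'INDENT'
# classify_indent(stack, '    ')      -> 'DEDENT x 1'
# classify_indent(stack, '  ')        -> 'INDENTATION_ERROR'
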
def tape_literal_multile_heads_error(token: Token, head_count: int):
    generate_error(
        'Semantic',
        'Number of heads should be 0 or 1, got {}'.format(head_count),
        token.line, token.column)

def tape_literal_length_error(token: Token):
    generate_error('Semantic',
                   'Tape literal length should be greater than 0, got 0',
                   token.line, token.column)

def symbol_literal_length_error(token: Token, length: int):
    generate_error(
        'Semantic',
        'Symbol literal length should be 1, got {}'.format(length),
        token.line, token.column)

def integer_literal_out_of_range_error(token: Token, value: int):
    generate_error(
        'Semantic',
        'Integer literal is out of range, should be in [-32768, 32767], got {}'
        .format(value), token.line, token.column)

def undeclared_variable_error(token: Token, variable_name: str):
    generate_error('Semantic',
                   'Undeclared variable {}'.format(variable_name),
                   token.line, token.column)

def incompatible_types_error(token: Token, type1: str, type2: str):
    generate_error('Semantic',
                   'Incompatible types {} and {}'.format(type1, type2),
                   token.line, token.column)

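# A minimal sketch of how a semantic pass might use the helpers in this
# module while walking an AST. The node attributes (node.token, node.value,
# node.left.type_, node.right.type_) and the two check_* functions are
# assumptions for illustration; only the error helpers come from this module.
INT_MIN, INT_MAX = -32768, 32767

def check_integer_literal(node):
    # Integer literals must fit the 16-bit signed range reported by
    # integer_literal_out_of_range_error above.
    if not INT_MIN <= node.value <= INT_MAX:
        integer_literal_out_of_range_error(node.token, node.value)

def check_binary_operation(node):
    # Both operands of a binary operation are expected to have the same type.
    if node.left.type_ != node.right.type_:
        incompatible_types_error(node.token, node.left.type_, node.right.type_)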