"""Tests for hcl.lexer.

In the original suite each test is driven by @pytest.mark.parametrize;
the decorators and fixture tables are omitted here.
"""
from os.path import dirname, join

import hcl.lexer

# Assumed definitions: the original suite defines the fixture directory and
# the Error sentinel elsewhere; the exact path here is a placeholder.
LEX_FIXTURE_DIR = join(dirname(__file__), 'fixtures')


class Error:
    """Sentinel marking the position at which the lexer is expected to raise."""


def test_tokens(token, input_string):
    print(input_string)
    lexer = hcl.lexer.Lexer()
    lexer.input(input_string)
    lex_tok = lexer.token()
    if lex_tok is None:
        assert token is None
    else:
        assert token == lex_tok.type
        assert lexer.token() is None

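# A minimal sketch of the parametrization test_tokens relies on. The pairs
# below are illustrative assumptions, not the suite's real fixture data,
# though the token names match hcl.lexer's token types; the real suite
# applies them with @pytest.mark.parametrize("token,input_string", ...).
TOKEN_FIXTURES = [
    ('BOOL', 'true'),
    ('NUMBER', '42'),
    ('STRING', '"quoted"'),
    (None, ''),  # empty input should yield no token at all
]
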
def test_multi_line_comments_extract(token, input_string):
    print(input_string)
    lexer = hcl.lexer.Lexer(export_comments='ALL')
    lexer.input(input_string)
    lex_tok = lexer.token()
    if lex_tok is None:
        assert token is None
    else:
        assert token == lex_tok.type
        assert lexer.token() is None

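# Illustrative fixtures (assumed values) for the comment-extraction test:
# with export_comments='ALL' the lexer emits COMMENT and MULTICOMMENT tokens
# instead of discarding comments.
COMMENT_FIXTURES = [
    ('COMMENT', '# a line comment'),
    ('COMMENT', '// a line comment'),
    ('MULTICOMMENT', '/* a block\n   comment */'),
]
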
def test_lexer_errors(hcl_fname, tokens, error_loc):
    # Fixtures may name a .hcl file in LEX_FIXTURE_DIR or pass inline HCL.
    if hcl_fname.endswith('.hcl'):
        with open(join(LEX_FIXTURE_DIR, hcl_fname), 'r') as fp:
            input = fp.read()
    else:
        input = hcl_fname
    print(input)
    lexer = hcl.lexer.Lexer()
    lexer.input(input)
    for tok in tokens:
        try:
            lex_tok = lexer.token()
        except ValueError as e:
            # The Error sentinel marks the token at which lexing should fail,
            # and error_loc carries the expected exception message.
            assert tok is Error
            assert error_loc == str(e)
            return
        if lex_tok is None:
            assert tok is None
        else:
            assert tok == lex_tok.type
            print(lex_tok)

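# An assumed example of the error-fixture shape: the expected tokens up to
# the Error sentinel, plus the exception message. The input, token names,
# and message text are placeholders, not the suite's real data.
LEXER_ERROR_FIXTURES = [
    ('a = "unterminated',
     ['IDENTIFIER', 'EQUAL', Error],
     'Lexer error on line 1'),  # placeholder message
]
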
def test_complex_tokens(tokens, input_string):
    print(input_string)
    lexer = hcl.lexer.Lexer()
    lexer.input(input_string)
    for tok in tokens:
        lex_tok = lexer.token()
        if lex_tok is None:
            assert tok is None
        else:
            assert tok == lex_tok.type
            print(lex_tok)
    assert lexer.token() is None

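# An assumed example of a complex-token fixture: the full token stream for a
# small HCL snippet, checked in order until the input is exhausted.
COMPLEX_FIXTURES = [
    (['IDENTIFIER', 'EQUAL', 'LEFTBRACE',
      'IDENTIFIER', 'EQUAL', 'STRING',
      'RIGHTBRACE'],
     'foo = { bar = "baz" }'),
]
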
def test_lexer(hcl_fname, tokens):
    # Fixtures may name a .hcl file in LEX_FIXTURE_DIR or pass inline HCL.
    if hcl_fname.endswith('.hcl'):
        with open(join(LEX_FIXTURE_DIR, hcl_fname), 'r') as fp:
            input = fp.read()
    else:
        input = hcl_fname
    print(input)
    lexer = hcl.lexer.Lexer()
    lexer.input(input)
    for expected_token in tokens:
        lex_tok = lexer.token()
        if lex_tok is None:
            assert expected_token is None
        else:
            assert expected_token == lex_tok.type
            print(lex_tok)
    assert lexer.token() is None

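# An assumed example of test_lexer's fixtures: a .hcl filename is resolved
# against LEX_FIXTURE_DIR, anything else is lexed as inline HCL. The file
# name and token streams below are illustrative only.
LEXER_FIXTURES = [
    ('basic.hcl', ['IDENTIFIER', 'EQUAL', 'STRING', None]),
    ('foo = true', ['IDENTIFIER', 'EQUAL', 'BOOL', None]),
]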