def __init__(
    self,
    yacc_optimize=True,
    yacc_table="xonsh.completion_parser_table",
    debug=False,
    outputdir=None,
):
    self.cursor = 0
    self.current_input = ""
    self.line_indices = ()
    self.paren_counts = defaultdict(int)
    self.error = None
    self.debug = debug
    self.lexer = Lexer(tolerant=True)
    self.tokens = tuple(self.used_tokens | self.artificial_tokens)

    yacc_kwargs = dict(
        module=self,
        debug=debug,
        optimize=yacc_optimize,
        tabmodule=yacc_table,
    )
    if not debug:
        yacc_kwargs["errorlog"] = yacc.NullLogger()
    if outputdir is None:
        outputdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
    yacc_kwargs["outputdir"] = outputdir

    # create parser on main thread, it's small and should be fast
    self.parser = yacc.yacc(**yacc_kwargs)

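# A minimal, self-contained sketch (hypothetical calculator grammar, not xonsh's
# completion grammar) of the construction pattern used above: a class instance
# serves as the PLY "module", and yacc.yacc(module=...) builds a parser from its
# p_* rules.  Assumes the standalone `ply` package; xonsh ships its own vendored copy.
import ply.lex as lex
import ply.yacc as yacc


class CalcParser:
    tokens = ("NUMBER", "PLUS")

    t_PLUS = r"\+"
    t_ignore = " \t"

    def t_NUMBER(self, t):
        r"\d+"
        t.value = int(t.value)
        return t

    def t_error(self, t):
        # skip characters the lexer cannot match
        t.lexer.skip(1)

    def p_expr_plus(self, p):
        "expr : expr PLUS NUMBER"
        p[0] = p[1] + p[3]

    def p_expr_number(self, p):
        "expr : NUMBER"
        p[0] = p[1]

    def p_error(self, p):
        pass

    def __init__(self):
        self.lexer = lex.lex(module=self)
        # same module-based construction as the xonsh __init__ above
        self.parser = yacc.yacc(module=self, debug=False, write_tables=False)

    def parse(self, text):
        return self.parser.parse(text, lexer=self.lexer)


# Usage sketch: CalcParser().parse("1 + 2 + 3") evaluates to 6.
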
def check_token(inp, exp):
    l = Lexer()
    l.input(inp)
    obs = list(l)
    if len(obs) != 1:
        msg = 'The observed sequence does not have length-1: {0!r} != 1\n'
        msg += '# obs\n{1}'
        pytest.fail(msg.format(len(obs), pformat(obs)))
        return
    assert_token_equal(exp, obs[0])

def check_token(input, exp):
    l = Lexer()
    l.input(input)
    obs = list(l)
    if len(obs) != 1:
        msg = 'The observed sequence does not have length-1: {0!r} != 1\n'
        msg += '# obs\n{1}'
        raise AssertionError(msg.format(len(obs), pformat(obs)))
    assert_token_equal(exp, obs[0])

from xonsh.lexer import Lexer
from xonsh.tools import (
    subproc_toks,
    swap_values,
    get_line_continuation,
    get_logical_line,
    replace_logical_line,
    check_quotes,
    deprecated,
    is_writable_file,
    balanced_parens,
    iglobpath,
    all_permutations,
)
from xonsh.environ import Env

from tools import skip_if_on_windows

LEXER = Lexer()
LEXER.build()

INDENT = "    "

TOOLS_ENV = {"EXPAND_ENV_VARS": True, "XONSH_ENCODING_ERRORS": "strict"}
ENCODE_ENV_ONLY = {"XONSH_ENCODING_ERRORS": "strict"}
PATHEXT_ENV = {"PATHEXT": [".COM", ".EXE", ".BAT"]}


def test_subproc_toks_x():
    exp = "![x]"
    obs = subproc_toks("x", lexer=LEXER, returnline=True)
    assert exp == obs

def check_tokens_subproc(inp, exp):
    l = Lexer()
    l.input('$[{}]'.format(inp))
    obs = list(l)[1:-1]
    return assert_tokens_equal(exp, obs)

def check_tokens(inp, exp):
    l = Lexer()
    l.input(inp)
    obs = list(l)
    return assert_tokens_equal(exp, obs)

def check_tokens_subproc(inp, exp, stop=-1):
    l = Lexer()
    l.input("$[{}]".format(inp))
    obs = list(l)[1:stop]
    return assert_tokens_equal(exp, obs)

def test_lexer_split(s, exp):
    lexer = Lexer()
    obs = lexer.split(s)
    assert exp == obs

def test_redir_whitespace(case):
    inp = "![{}/path/to/file]".format(case)
    l = Lexer()
    l.input(inp)
    obs = list(l)
    assert obs[2].type == "WS"

def lex_input(inp: str):
    lex = Lexer()
    lex.input(inp)
    return list(lex)

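# Hedged usage sketch for lex_input above.  The input string and test name are
# illustrative; the only assumption is that each produced token exposes a string
# .type attribute, which the assertions elsewhere in this file already rely on.
def test_lex_input_produces_typed_tokens():
    toks = lex_input("ls -l")
    assert toks
    assert all(isinstance(tok.type, str) for tok in toks)
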
def test_tolerant_lexer(s):
    lexer = Lexer(tolerant=True)
    lexer.input(s)
    error_tokens = list(tok for tok in lexer if tok.type == "ERRORTOKEN")
    assert all(tok.value in s for tok in error_tokens)  # no error messages

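# Hedged sketch of how test_tolerant_lexer above is typically driven: a
# pytest.mark.parametrize decorator supplies the input strings.  The sample
# strings below are illustrative, not xonsh's actual test cases.
import pytest


@pytest.mark.parametrize("s", ["print('hello')", "echo 'unterminated", "$(ls"])
def test_tolerant_lexer_examples(s):
    lexer = Lexer(tolerant=True)
    lexer.input(s)
    error_tokens = [tok for tok in lexer if tok.type == "ERRORTOKEN"]
    # in tolerant mode the token value carries the raw text, not an error message
    assert all(tok.value in s for tok in error_tokens)
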
def check_tokens(input, exp):
    l = Lexer()
    l.input(input)
    obs = list(l)
    assert_tokens_equal(exp, obs)