Example #1
0
 def __init__(self, text, tokenizer, parser=BaseParser, diff_parser=None):
     """Build the pgen grammar for *text* and store the parser hooks.

     Also records a SHA-256 hex digest of *text* so the grammar source
     can later be identified/compared by content.
     """
     # Resolve the token namespace first; it is the only piece of
     # instance state the grammar generation depends on.
     namespace = self._get_token_namespace()
     self._pgen_grammar = generate_grammar(text, token_namespace=namespace)
     self._hashed = hashlib.sha256(text.encode("utf-8")).hexdigest()
     self._tokenizer = tokenizer
     self._parser = parser
     self._diff_parser = diff_parser
Example #2
0
 def __init__(self, text, tokenizer, parser=BaseParser, diff_parser=None):
     """Initialize grammar/parser state and remember a SHA-256 of *text*."""
     # Content hash of the grammar text, used to identify this grammar.
     self._hashed = hashlib.sha256(text.encode("utf-8")).hexdigest()
     self._diff_parser = diff_parser
     self._tokenizer = tokenizer
     self._parser = parser
     self._pgen_grammar = generate_grammar(
         text,
         token_namespace=self._get_token_namespace(),
     )
Example #3
0
def test_ambiguities():
    """Each ambiguous grammar must be rejected with a matching ValueError."""
    # Three distinct kinds of ambiguity: two rules reducing to NAME,
    # two rules reducing to the same keyword, and a rule shadowing a keyword.
    ambiguous_grammars = (
        'foo: bar | baz\nbar: NAME\nbaz: NAME\n',
        '''foo: bar | baz\nbar: 'x'\nbaz: "x"\n''',
        '''foo: bar | 'x'\nbar: 'x'\n''',
    )
    for grammar in ambiguous_grammars:
        with pytest.raises(ValueError, match='ambiguous'):
            generate_grammar(grammar, tokenize.PythonTokenTypes)
Example #4
0
def test_left_recursion():
    """A directly left-recursive rule must raise a ValueError."""
    left_recursive = 'foo: foo NAME\n'
    with pytest.raises(ValueError, match='left recursion'):
        generate_grammar(left_recursive, tokenize.PythonTokenTypes)
Example #5
0
def test_ambiguities(grammar, error_match):
    """Parametrized check: *grammar* must raise ValueError matching *error_match*."""
    expected_failure = pytest.raises(ValueError, match=error_match)
    with expected_failure:
        generate_grammar(grammar, tokenize.PythonTokenTypes)