import pytest

from hy.models import Keyword, Symbol


def test_symbol_or_keyword():
    # Valid as both a symbol and a keyword name.
    for x in ("foo", "foo-bar", "foo_bar", "✈é😂⁂"):
        assert str(Symbol(x)) == x
        assert Keyword(x).name == x
    # Invalid as a symbol, but still allowed as a keyword name.
    for x in ("", ":foo", "5"):
        with pytest.raises(ValueError):
            Symbol(x)
        assert Keyword(x).name == x
    # Invalid as both.
    for x in ("foo bar", "fib()"):
        with pytest.raises(ValueError):
            Symbol(x)
        with pytest.raises(ValueError):
            Keyword(x)
from hy.models import Complex, Expression, Float, Integer, Keyword, Symbol


def sym(name):
    # Assumed reader-local helper (as in Hy's reader): build a Symbol
    # flagged as parser-generated, so internal names like "hy._Fraction"
    # bypass the usual identifier validation.
    return Symbol(name, from_parser=True)


def symbol_like(obj):
    "Try to interpret `obj` as a number or keyword."

    try:
        return Integer(obj)
    except ValueError:
        pass

    if "/" in obj:
        try:
            # Read a rational literal like `1/3` as a call to
            # `hy._Fraction`.
            lhs, rhs = obj.split("/")
            return Expression(
                [sym("hy._Fraction"), Integer(lhs), Integer(rhs)])
        except ValueError:
            pass

    try:
        return Float(obj)
    except ValueError:
        pass

    # A bare `j` or `J` would parse as a complex literal, but should stay
    # a plain symbol.
    if obj not in ("j", "J"):
        try:
            return Complex(obj)
        except ValueError:
            pass

    if obj.startswith(":") and "." not in obj:
        return Keyword(obj[1:], from_parser=True)
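# A minimal usage sketch (illustrative only, not part of the module):
# `symbol_like` tries Integer, Fraction, Float, and Complex in turn, then
# keywords, and returns None for anything that should stay a plain Symbol.
def _symbol_like_examples():
    assert symbol_like("42") == Integer(42)
    assert symbol_like("1/3") == Expression(
        [Symbol("hy._Fraction"), Integer(1), Integer(3)])
    assert symbol_like("1.5") == Float(1.5)
    assert symbol_like("2j") == Complex(2j)
    assert symbol_like(":foo") == Keyword("foo")
    assert symbol_like("foo") is None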
from hy.lex import tokenize  # assumed import; Hy's reader entry point
from hy.models import Dict, Expression, Integer, Keyword, List, Set, Symbol


def test_discard():
    """Check that discarded terms are removed properly."""
    # empty
    assert tokenize("") == []
    # single
    assert tokenize("#_1") == []
    # multiple
    assert tokenize("#_1 #_2") == []
    assert tokenize("#_1 #_2 #_3") == []
    # nested discard
    assert tokenize("#_ #_1 2") == []
    assert tokenize("#_ #_ #_1 2 3") == []
    # trailing
    assert tokenize("0") == [Integer(0)]
    assert tokenize("0 #_1") == [Integer(0)]
    assert tokenize("0 #_1 #_2") == [Integer(0)]
    # leading
    assert tokenize("2") == [Integer(2)]
    assert tokenize("#_1 2") == [Integer(2)]
    assert tokenize("#_0 #_1 2") == [Integer(2)]
    assert tokenize("#_ #_0 1 2") == [Integer(2)]
    # both
    assert tokenize("#_1 2 #_3") == [Integer(2)]
    assert tokenize("#_0 #_1 2 #_ #_3 4") == [Integer(2)]
    # inside
    assert tokenize("0 #_1 2") == [Integer(0), Integer(2)]
    assert tokenize("0 #_1 #_2 3") == [Integer(0), Integer(3)]
    assert tokenize("0 #_ #_1 2 3") == [Integer(0), Integer(3)]
    # in List
    assert tokenize("[]") == [List([])]
    assert tokenize("[#_1]") == [List([])]
    assert tokenize("[#_1 #_2]") == [List([])]
    assert tokenize("[#_ #_1 2]") == [List([])]
    assert tokenize("[0]") == [List([Integer(0)])]
    assert tokenize("[0 #_1]") == [List([Integer(0)])]
    assert tokenize("[0 #_1 #_2]") == [List([Integer(0)])]
    assert tokenize("[2]") == [List([Integer(2)])]
    assert tokenize("[#_1 2]") == [List([Integer(2)])]
    assert tokenize("[#_0 #_1 2]") == [List([Integer(2)])]
    assert tokenize("[#_ #_0 1 2]") == [List([Integer(2)])]
    # in Set
    assert tokenize("#{}") == [Set()]
    assert tokenize("#{#_1}") == [Set()]
    assert tokenize("#{0 #_1}") == [Set([Integer(0)])]
    assert tokenize("#{#_1 0}") == [Set([Integer(0)])]
    # in Dict
    assert tokenize("{}") == [Dict()]
    assert tokenize("{#_1}") == [Dict()]
    assert tokenize("{#_0 1 2}") == [Dict([Integer(1), Integer(2)])]
    assert tokenize("{1 #_0 2}") == [Dict([Integer(1), Integer(2)])]
    assert tokenize("{1 2 #_0}") == [Dict([Integer(1), Integer(2)])]
    # in Expression
    assert tokenize("()") == [Expression()]
    assert tokenize("(#_foo)") == [Expression()]
    assert tokenize("(#_foo bar)") == [Expression([Symbol("bar")])]
    assert tokenize("(foo #_bar)") == [Expression([Symbol("foo")])]
    assert tokenize("(foo :bar 1)") == [
        Expression([Symbol("foo"), Keyword("bar"), Integer(1)])
    ]
    assert tokenize("(foo #_:bar 1)") == [
        Expression([Symbol("foo"), Integer(1)])
    ]
    assert tokenize("(foo :bar #_1)") == [
        Expression([Symbol("foo"), Keyword("bar")])
    ]
    # discard term with nesting
    assert tokenize("[1 2 #_[a b c [d e [f g] h]] 3 4]") == [
        List([Integer(1), Integer(2), Integer(3), Integer(4)])
    ]
    # discard with other prefix syntax
    assert tokenize("a #_'b c") == [Symbol("a"), Symbol("c")]
    assert tokenize("a '#_b c") == [
        Symbol("a"), Expression([Symbol("quote"), Symbol("c")])
    ]
    assert tokenize("a '#_b #_c d") == [
        Symbol("a"), Expression([Symbol("quote"), Symbol("d")])
    ]
    assert tokenize("a '#_ #_b c d") == [
        Symbol("a"), Expression([Symbol("quote"), Symbol("d")])
    ]
from funcparserlib.parser import a, skip, some  # parser combinators


def _sym(wanted, f=lambda x: x):
    # Match the literal symbol `wanted` (or the keyword, when `wanted`
    # starts with ":"), wrapping the resulting parser with `f`.
    if wanted.startswith(":"):
        return f(a(Keyword(wanted[1:])))
    return f(some(lambda x: x == Symbol(wanted)))
def sym(wanted):
    "Parse and skip the given symbol or keyword."
    return _sym(wanted, skip)
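# A hedged usage sketch: as above, `a`, `some`, and `skip` are assumed to be
# funcparserlib combinators, so this `sym` (a pattern matcher, unrelated to
# the reader helper of the same name earlier) consumes a literal head form
# without keeping it in the parse result.
def _sym_usage_example():
    from funcparserlib.parser import finished, many

    # Match (foo ...): skip the head symbol, keep every following form,
    # and require end of input.
    pattern = sym("foo") + many(some(lambda _: True)) + skip(finished)
    return pattern.parse([Symbol("foo"), Integer(1), Integer(2)])
    # => [Integer(1), Integer(2)]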