Example #1
def test_sets():
    """ Ensure that we can tokenize a set. """
    objs = tokenize("#{1 2}")
    assert objs == [Set([Integer(1), Integer(2)])]
    objs = tokenize("(bar #{foo bar baz})")
    assert objs == [
        Expression([
            Symbol("bar"),
            Set([Symbol("foo"), Symbol("bar"),
                 Symbol("baz")])
        ])
    ]

    objs = tokenize("#{(foo bar) (baz quux)}")
    assert objs == [
        Set([
            Expression([Symbol("foo"), Symbol("bar")]),
            Expression([Symbol("baz"), Symbol("quux")])
        ])
    ]

    # Duplicate items in a literal set should be okay (and should
    # be preserved).
    objs = tokenize("#{1 2 1 1 2 1}")
    assert objs == [Set([Integer(n) for n in [1, 2, 1, 1, 2, 1]])]
    assert len(objs[0]) == 6

    # https://github.com/hylang/hy/issues/1120
    objs = tokenize("#{a 1}")
    assert objs == [Set([Symbol("a"), Integer(1)])]
Example #2
def test_lex_fractions():
    """ Make sure that fractions are valid expressions"""
    objs = tokenize("1/2")
    assert objs == [
        Expression([Symbol("hy._Fraction"),
                    Integer(1), Integer(2)])
    ]
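The tokenizer turns a ratio literal such as 1/2 into a call to hy._Fraction. A minimal sketch of why that call form is useful, assuming hy._Fraction is simply Hy's alias for the standard-library fractions.Fraction:

from fractions import Fraction

# Exact rational arithmetic, with no floating-point rounding.
assert Fraction(1, 2) + Fraction(1, 3) == Fraction(5, 6)
assert float(Fraction(1, 2)) == 0.5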
Example #3
def test_compiler_yield_return():
    """
    Check that the compiler correctly generates return statements for
    a generator function. In Python versions prior to 3.3, the return
    statement in a generator can't take a value, so the final expression
    should not generate a return statement. From 3.3 onwards a return
    value should be generated.
    """
    e = make_expression(Symbol("fn"), List(),
                        Expression([Symbol("yield"),
                                    Integer(2)]),
                        Expression([Symbol("+"),
                                    Integer(1),
                                    Integer(1)]))
    ret = compiler.HyASTCompiler(types.ModuleType('test')).compile_atom(e)

    assert len(ret.stmts) == 1
    stmt, = ret.stmts
    assert isinstance(stmt, ast.FunctionDef)
    body = stmt.body
    assert len(body) == 2
    assert isinstance(body[0], ast.Expr)
    assert isinstance(body[0].value, ast.Yield)
    assert isinstance(body[1], ast.Return)
    assert isinstance(body[1].value, ast.BinOp)
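For orientation, the model tree built above corresponds to the Hy form (fn [] (yield 2) (+ 1 1)). A plain-Python sketch of the generator the compiler is expected to produce, illustrative only and not the compiler's literal output:

def gen():
    yield 2
    return 1 + 1  # the final expression becomes the generator's return value

g = gen()
assert next(g) == 2
try:
    next(g)
except StopIteration as e:
    # On Python 3.3+ the returned value surfaces as StopIteration.value.
    assert e.value == 2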
Example #4
def symbol_like(obj):
    "Try to interpret `obj` as a number or keyword."

    try:
        return Integer(obj)
    except ValueError:
        pass

    if "/" in obj:
        try:
            lhs, rhs = obj.split("/")
            return Expression(
                [sym("hy._Fraction"),
                 Integer(lhs), Integer(rhs)])
        except ValueError:
            pass

    try:
        return Float(obj)
    except ValueError:
        pass

    if obj not in ("j", "J"):
        try:
            return Complex(obj)
        except ValueError:
            pass

    if obj.startswith(":") and "." not in obj:
        return Keyword(obj[1:], from_parser=True)
Example #5
def symbol_like(obj):
    "Try to interpret `obj` as a number or keyword."

    try:
        return Integer(obj)
    except ValueError:
        pass

    if '/' in obj:
        try:
            lhs, rhs = obj.split('/')
            return Expression(
                [Symbol('hy._Fraction'),
                 Integer(lhs),
                 Integer(rhs)])
        except ValueError:
            pass

    try:
        return Float(obj)
    except ValueError:
        pass

    if obj not in ('j', 'J'):
        try:
            return Complex(obj)
        except ValueError:
            pass

    if obj.startswith(":") and "." not in obj:
        return Keyword(obj[1:])
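A rough usage sketch of symbol_like, not taken from the test suite, assuming the hy.models constructors behave as in the examples above and that Keyword equality depends only on the name; text that matches neither a number nor a keyword falls through and returns None:

assert symbol_like("42") == Integer(42)
assert symbol_like("1.5") == Float(1.5)
assert symbol_like("2j") == Complex(2j)
assert symbol_like("1/2") == Expression(
    [Symbol("hy._Fraction"), Integer(1), Integer(2)])
assert symbol_like(":name") == Keyword("name")
assert symbol_like("spam") is None  # left for the caller to treat as a Symbol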
Example #6
def test_invalid_bracket_strings():
    for string, brackets in [("]foo]", "foo"), ("something ]f] else", "f")]:
        with pytest.raises(ValueError):
            String(string, brackets)
    for nodes, brackets in [
        ([String("hello"), String("world ]foo]")], "foo"),
        ([String("something"), FComponent([String("world")]), String("]f]")], "f"),
        ([String("something"), FComponent([Integer(1), String("]f]")])], "f"),
    ]:
        with pytest.raises(ValueError):
            FString(nodes, brackets=brackets)
Example #7
def test_number_model_copy():
    i = Integer(42)
    assert (i == copy.copy(i))
    assert (i == copy.deepcopy(i))

    f = Float(42.)
    assert (f == copy.copy(f))
    assert (f == copy.deepcopy(f))

    c = Complex(42j)
    assert (c == copy.copy(c))
    assert (c == copy.deepcopy(c))
Example #8
def test_number_model_copy():
    i = Integer(42)
    assert i == copy.copy(i)
    assert i == copy.deepcopy(i)

    f = Float(42.0)
    assert f == copy.copy(f)
    assert f == copy.deepcopy(f)

    c = Complex(42j)
    assert c == copy.copy(c)
    assert c == copy.deepcopy(c)
Example #9
def test_lex_digit_separators():

    assert tokenize("1_000_000") == [Integer(1000000)]
    assert tokenize("1,000,000") == [Integer(1000000)]
    assert tokenize("1,000_000") == [Integer(1000000)]
    assert tokenize("1_000,000") == [Integer(1000000)]

    assert tokenize("0x_af") == [Integer(0xaf)]
    assert tokenize("0x,af") == [Integer(0xaf)]
    assert tokenize("0b_010") == [Integer(0b010)]
    assert tokenize("0b,010") == [Integer(0b010)]
    assert tokenize("0o_373") == [Integer(0o373)]
    assert tokenize("0o,373") == [Integer(0o373)]

    assert tokenize('1_2.3,4') == [Float(12.34)]
    assert tokenize('1_2e3,4') == [Float(12e34)]
    assert (tokenize("1,2/3_4") == [
        Expression([Symbol("hy._Fraction"),
                    Integer(12),
                    Integer(34)])
    ])
    assert tokenize("1,0_00j") == [Complex(1000j)]

    assert tokenize("1,,,,___,____,,__,,2__,,,__") == [Integer(12)]
    assert (tokenize("_1,,,,___,____,,__,,2__,,,__") == [
        Symbol("_1,,,,___,____,,__,,2__,,,__")
    ])
    assert (tokenize("1,,,,___,____,,__,,2__,q,__") == [
        Symbol("1,,,,___,____,,__,,2__,q,__")
    ])
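For comparison, a short aside that is not part of the test suite: plain Python accepts underscores as digit separators but not commas, so Hy's reader presumably strips the commas before handing the text to the numeric constructors.

assert 1_000_000 == 1000000
assert int("1_000_000") == 1000000
# int("1,000,000") raises ValueError in plain Python, while the Hy reader
# accepts "1,000,000" as shown above.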
Example #10
def test_lex_integers():
    """ Make sure that integers are valid expressions"""
    objs = tokenize("42 ")
    assert objs == [Integer(42)]
Example #11
def test_wrap_tuple():
    wrapped = as_model((Integer(0), ))
    assert type(wrapped) == List
    assert type(wrapped[0]) == Integer
    assert wrapped == List([Integer(0)])
Example #12
def test_replace_int():
    """ Test replacing integers."""
    replaced = replace_hy_obj(0, Integer(13))
    assert replaced == Integer(0)
Example #13
def test_wrap_nested_expr():
    """ Test conversion of Expressions with embedded non-HyObjects."""
    wrapped = as_model(Expression([0]))
    assert type(wrapped) == Expression
    assert type(wrapped[0]) == Integer
    assert wrapped == Expression([Integer(0)])
Example #14
def test_wrap_tuple():
    """ Test conversion of tuples."""
    wrapped = as_model((Integer(0),))
    assert type(wrapped) == List
    assert type(wrapped[0]) == Integer
    assert wrapped == List([Integer(0)])
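A small sketch of the wrapping behavior these tests rely on, assuming as_model is importable from hy.models as in recent Hy releases: plain Python values are promoted to their model equivalents, and containers are converted recursively.

from hy.models import Integer, List, as_model

assert as_model(0) == Integer(0)
assert as_model([0, 1]) == List([Integer(0), Integer(1)])
assert as_model((0,)) == List([Integer(0)])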
Example #15
def test_replace_int():
    replaced = replace_hy_obj(0, Integer(13))
    assert replaced == Integer(0)
Example #16
def test_discard():
    """Check that discarded terms are removed properly."""
    # empty
    assert tokenize("") == []
    # single
    assert tokenize("#_1") == []
    # multiple
    assert tokenize("#_1 #_2") == []
    assert tokenize("#_1 #_2 #_3") == []
    # nested discard
    assert tokenize("#_ #_1 2") == []
    assert tokenize("#_ #_ #_1 2 3") == []
    # trailing
    assert tokenize("0") == [Integer(0)]
    assert tokenize("0 #_1") == [Integer(0)]
    assert tokenize("0 #_1 #_2") == [Integer(0)]
    # leading
    assert tokenize("2") == [Integer(2)]
    assert tokenize("#_1 2") == [Integer(2)]
    assert tokenize("#_0 #_1 2") == [Integer(2)]
    assert tokenize("#_ #_0 1 2") == [Integer(2)]
    # both
    assert tokenize("#_1 2 #_3") == [Integer(2)]
    assert tokenize("#_0 #_1 2 #_ #_3 4") == [Integer(2)]
    # inside
    assert tokenize("0 #_1 2") == [Integer(0), Integer(2)]
    assert tokenize("0 #_1 #_2 3") == [Integer(0), Integer(3)]
    assert tokenize("0 #_ #_1 2 3") == [Integer(0), Integer(3)]
    # in List
    assert tokenize("[]") == [List([])]
    assert tokenize("[#_1]") == [List([])]
    assert tokenize("[#_1 #_2]") == [List([])]
    assert tokenize("[#_ #_1 2]") == [List([])]
    assert tokenize("[0]") == [List([Integer(0)])]
    assert tokenize("[0 #_1]") == [List([Integer(0)])]
    assert tokenize("[0 #_1 #_2]") == [List([Integer(0)])]
    assert tokenize("[2]") == [List([Integer(2)])]
    assert tokenize("[#_1 2]") == [List([Integer(2)])]
    assert tokenize("[#_0 #_1 2]") == [List([Integer(2)])]
    assert tokenize("[#_ #_0 1 2]") == [List([Integer(2)])]
    # in Set
    assert tokenize("#{}") == [Set()]
    assert tokenize("#{#_1}") == [Set()]
    assert tokenize("#{0 #_1}") == [Set([Integer(0)])]
    assert tokenize("#{#_1 0}") == [Set([Integer(0)])]
    # in Dict
    assert tokenize("{}") == [Dict()]
    assert tokenize("{#_1}") == [Dict()]
    assert tokenize("{#_0 1 2}") == [Dict([Integer(1), Integer(2)])]
    assert tokenize("{1 #_0 2}") == [Dict([Integer(1), Integer(2)])]
    assert tokenize("{1 2 #_0}") == [Dict([Integer(1), Integer(2)])]
    # in Expression
    assert tokenize("()") == [Expression()]
    assert tokenize("(#_foo)") == [Expression()]
    assert tokenize("(#_foo bar)") == [Expression([Symbol("bar")])]
    assert tokenize("(foo #_bar)") == [Expression([Symbol("foo")])]
    assert tokenize("(foo :bar 1)") == [
        Expression([Symbol("foo"), Keyword("bar"),
                    Integer(1)])
    ]
    assert tokenize("(foo #_:bar 1)") == [
        Expression([Symbol("foo"), Integer(1)])
    ]
    assert tokenize("(foo :bar #_1)") == [
        Expression([Symbol("foo"), Keyword("bar")])
    ]
    # discard term with nesting
    assert tokenize("[1 2 #_[a b c [d e [f g] h]] 3 4]") == [
        List([Integer(1), Integer(2),
              Integer(3), Integer(4)])
    ]
    # discard with other prefix syntax
    assert tokenize("a #_'b c") == [Symbol("a"), Symbol("c")]
    assert tokenize("a '#_b c") == [
        Symbol("a"), Expression([Symbol("quote"), Symbol("c")])
    ]
    assert tokenize("a '#_b #_c d") == [
        Symbol("a"), Expression([Symbol("quote"), Symbol("d")])
    ]
    assert tokenize("a '#_ #_b c d") == [
        Symbol("a"), Expression([Symbol("quote"), Symbol("d")])
    ]
Example #17
def test_lex_expression_integer():
    """ Make sure expressions can produce integers """
    objs = tokenize("(foo 2)")
    assert objs == [Expression([Symbol("foo"), Integer(2)])]
Example #18
def test_replace_tuple():
    """ Test replacing tuples."""
    replaced = replace_hy_obj((0, ), Integer(13))
    assert type(replaced) == List
    assert type(replaced[0]) == Integer
    assert replaced == List([Integer(0)])