Example #1
def test_list_add():
    """Check that adding two Lists generates a List"""
    a = List([1, 2, 3])
    b = List([3, 4, 5])
    c = a + b
    assert c == List([1, 2, 3, 3, 4, 5])
    assert type(c) is List
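The subtype-preserving "+" checked above relies on the sequence model overriding __add__; a minimal plain-Python sketch of the idea (an illustrative subclass, not Hy's actual model code):

class MyList(list):
    """Illustrative stand-in for a sequence model that keeps its type."""

    def __add__(self, other):
        # list.__add__ alone would return a plain list, so wrap the
        # result back into the subclass.
        return type(self)(list.__add__(self, other))

a = MyList([1, 2, 3])
c = a + MyList([3, 4, 5])
assert c == [1, 2, 3, 3, 4, 5]
assert type(c) is MyList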
Example #2
def test_list_slice():
    """Check that slicing a List produces a List"""
    a = List([1, 2, 3, 4])
    sl1 = a[1:]
    sl5 = a[5:]

    assert type(sl1) == List
    assert sl1 == List([2, 3, 4])
    assert type(sl5) == List
    assert sl5 == List([])
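Slices keep the subtype only if __getitem__ wraps slice results; a companion sketch under the same assumption (illustrative, not Hy's implementation):

class MyList(list):
    """Illustrative subclass whose slices stay in the subclass."""

    def __getitem__(self, item):
        result = list.__getitem__(self, item)
        # Only slices produce a new sequence; plain indices pass through.
        return type(self)(result) if isinstance(item, slice) else result

a = MyList([1, 2, 3, 4])
assert type(a[1:]) is MyList and a[1:] == [2, 3, 4]
assert a[5:] == MyList([])  # an out-of-range slice is just an empty MyList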
Example #3
def test_compiler_yield_return():
    """
    Check that the compiler correctly generates return statements for
    a generator function. In Python versions prior to 3.3, the return
    statement in a generator can't take a value, so the final expression
    should not generate a return statement. From 3.3 onwards a return
    value should be generated.
    """
    e = make_expression(Symbol("fn"), List(),
                        Expression([Symbol("yield"),
                                    Integer(2)]),
                        Expression([Symbol("+"),
                                    Integer(1),
                                    Integer(1)]))
    ret = compiler.HyASTCompiler(types.ModuleType('test')).compile_atom(e)

    assert len(ret.stmts) == 1
    stmt, = ret.stmts
    assert isinstance(stmt, ast.FunctionDef)
    body = stmt.body
    assert len(body) == 2
    assert isinstance(body[0], ast.Expr)
    assert isinstance(body[0].value, ast.Yield)
    assert isinstance(body[1], ast.Return)
    assert isinstance(body[1].value, ast.BinOp)
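The form compiled above corresponds roughly to the Hy code (fn [] (yield 2) (+ 1 1)); on Python 3.3+ the trailing expression becomes a return statement, so at runtime its value surfaces as StopIteration.value (PEP 380). A plain-Python sketch of that expected behaviour:

def gen():
    yield 2
    return 1 + 1  # return with a value is allowed in generators since Python 3.3

g = gen()
assert next(g) == 2
try:
    next(g)
except StopIteration as stop:
    assert stop.value == 2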
Example #4
def test_preprocessor_simple():
    """ Test basic macro expansion """
    obj = macroexpand(tokenize('(test "one" "two")')[0],
                      __name__,
                      HyASTCompiler(__name__))
    assert obj == List(["one", "two"])
    assert type(obj) == List
Example #5
def test_preprocessor_expression():
    """Test that macro expansion doesn't recurse"""
    obj = macroexpand(
        tokenize('(test (test "one" "two"))')[0], __name__,
        HyASTCompiler(__name__))

    assert type(obj) == List
    assert type(obj[0]) == Expression

    assert obj[0] == Expression([Symbol("test"), String("one"), String("two")])

    obj = tokenize('(shill ["one" "two"])')[0][1]
    assert obj == macroexpand(obj, __name__, HyASTCompiler(__name__))
Example #6
def add(self, target, new_name=None):
    """Add a new let-binding target, mapped to a new, unique name."""
    if isinstance(target, (str, Symbol)):
        if "." in target:
            raise ValueError("binding target may not contain a dot")
        name = mangle(target)
        if new_name is None:
            new_name = self.compiler.get_anon_var(f"_hy_let_{name}")
        self.bindings[name] = new_name
        if isinstance(target, Symbol):
            return Symbol(new_name).replace(target)
        return new_name
    if new_name is not None:
        raise ValueError("cannot specify name for compound targets")
    if isinstance(target, List):
        return List(map(self.add, target)).replace(target)
    if (
        isinstance(target, Expression)
        and target
        and target[0] in (Symbol(","), Symbol("unpack-iterable"))
    ):
        return Expression([target[0], *map(self.add, target[1:])]).replace(target)
    raise ValueError(f"invalid binding target: {type(target)}")
Example #7
def test_discard():
    """Check that discarded terms are removed properly."""
    # empty
    assert tokenize("") == []
    # single
    assert tokenize("#_1") == []
    # multiple
    assert tokenize("#_1 #_2") == []
    assert tokenize("#_1 #_2 #_3") == []
    # nested discard
    assert tokenize("#_ #_1 2") == []
    assert tokenize("#_ #_ #_1 2 3") == []
    # trailing
    assert tokenize("0") == [Integer(0)]
    assert tokenize("0 #_1") == [Integer(0)]
    assert tokenize("0 #_1 #_2") == [Integer(0)]
    # leading
    assert tokenize("2") == [Integer(2)]
    assert tokenize("#_1 2") == [Integer(2)]
    assert tokenize("#_0 #_1 2") == [Integer(2)]
    assert tokenize("#_ #_0 1 2") == [Integer(2)]
    # both
    assert tokenize("#_1 2 #_3") == [Integer(2)]
    assert tokenize("#_0 #_1 2 #_ #_3 4") == [Integer(2)]
    # inside
    assert tokenize("0 #_1 2") == [Integer(0), Integer(2)]
    assert tokenize("0 #_1 #_2 3") == [Integer(0), Integer(3)]
    assert tokenize("0 #_ #_1 2 3") == [Integer(0), Integer(3)]
    # in List
    assert tokenize("[]") == [List([])]
    assert tokenize("[#_1]") == [List([])]
    assert tokenize("[#_1 #_2]") == [List([])]
    assert tokenize("[#_ #_1 2]") == [List([])]
    assert tokenize("[0]") == [List([Integer(0)])]
    assert tokenize("[0 #_1]") == [List([Integer(0)])]
    assert tokenize("[0 #_1 #_2]") == [List([Integer(0)])]
    assert tokenize("[2]") == [List([Integer(2)])]
    assert tokenize("[#_1 2]") == [List([Integer(2)])]
    assert tokenize("[#_0 #_1 2]") == [List([Integer(2)])]
    assert tokenize("[#_ #_0 1 2]") == [List([Integer(2)])]
    # in Set
    assert tokenize("#{}") == [Set()]
    assert tokenize("#{#_1}") == [Set()]
    assert tokenize("#{0 #_1}") == [Set([Integer(0)])]
    assert tokenize("#{#_1 0}") == [Set([Integer(0)])]
    # in Dict
    assert tokenize("{}") == [Dict()]
    assert tokenize("{#_1}") == [Dict()]
    assert tokenize("{#_0 1 2}") == [Dict([Integer(1), Integer(2)])]
    assert tokenize("{1 #_0 2}") == [Dict([Integer(1), Integer(2)])]
    assert tokenize("{1 2 #_0}") == [Dict([Integer(1), Integer(2)])]
    # in Expression
    assert tokenize("()") == [Expression()]
    assert tokenize("(#_foo)") == [Expression()]
    assert tokenize("(#_foo bar)") == [Expression([Symbol("bar")])]
    assert tokenize("(foo #_bar)") == [Expression([Symbol("foo")])]
    assert tokenize("(foo :bar 1)") == [
        Expression([Symbol("foo"), Keyword("bar"),
                    Integer(1)])
    ]
    assert tokenize("(foo #_:bar 1)") == [
        Expression([Symbol("foo"), Integer(1)])
    ]
    assert tokenize("(foo :bar #_1)") == [
        Expression([Symbol("foo"), Keyword("bar")])
    ]
    # discard term with nesting
    assert tokenize("[1 2 #_[a b c [d e [f g] h]] 3 4]") == [
        List([Integer(1), Integer(2),
              Integer(3), Integer(4)])
    ]
    # discard with other prefix syntax
    assert tokenize("a #_'b c") == [Symbol("a"), Symbol("c")]
    assert tokenize("a '#_b c") == [
        Symbol("a"), Expression([Symbol("quote"), Symbol("c")])
    ]
    assert tokenize("a '#_b #_c d") == [
        Symbol("a"), Expression([Symbol("quote"), Symbol("d")])
    ]
    assert tokenize("a '#_ #_b c d") == [
        Symbol("a"), Expression([Symbol("quote"), Symbol("d")])
    ]
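The #_ prefix simply drops the form that follows it; a simplified sketch of that rule over a flat token stream (no bracket nesting, purely illustrative and not Hy's reader):

DISCARD = "#_"

def strip_discards(tokens):
    """Drop one following form per #_ marker, including stacked markers."""
    out, i = [], 0
    while i < len(tokens):
        if tokens[i] == DISCARD:
            # Count consecutive markers, then skip that many forms.
            n = 0
            while i < len(tokens) and tokens[i] == DISCARD:
                n += 1
                i += 1
            i += n
        else:
            out.append(tokens[i])
            i += 1
    return out

assert strip_discards(["0", "#_", "1", "2"]) == ["0", "2"]
assert strip_discards(["#_", "#_", "1", "2", "3"]) == ["3"]
assert strip_discards(["#_", "1"]) == []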
Example #8
def t_empty_list(state, p):
    # Parser action: an empty bracket pair produces an empty List.
    return List([])
Example #9
def t_list(state, p):
    # Parser action: p[1] holds the forms parsed between the brackets.
    return List(p[1])
Example #10
def test_replace_tuple():
    """ Test replacing tuples."""
    replaced = replace_hy_obj((0, ), Integer(13))
    assert type(replaced) == List
    assert type(replaced[0]) == Integer
    assert replaced == List([Integer(0)])
Example #11
def test_wrap_tuple():
    """ Test conversion of tuples."""
    wrapped = as_model((Integer(0),))
    assert type(wrapped) == List
    assert type(wrapped[0]) == Integer
    assert wrapped == List([Integer(0)])
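The conversion tested above suggests a recursive wrapper over plain Python containers; a much-simplified sketch with built-in lists standing in for the Hy models (illustrative only, not the real as_model):

def to_model_sketch(x):
    # Tuples and lists both become (nested) lists; atoms pass through unchanged.
    if isinstance(x, (tuple, list)):
        return [to_model_sketch(item) for item in x]
    return x

assert to_model_sketch((0,)) == [0]
assert to_model_sketch((1, (2, 3))) == [1, [2, 3]]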
Example #12
def tmac(ETname, *tree):
    """Turn an expression into a list"""
    return List(tree)