Example #1
def test_sets():
    """ Ensure that we can tokenize a set. """
    objs = tokenize("#{1 2}")
    assert objs == [Set([Integer(1), Integer(2)])]
    objs = tokenize("(bar #{foo bar baz})")
    assert objs == [
        Expression([
            Symbol("bar"),
            Set([Symbol("foo"), Symbol("bar"),
                 Symbol("baz")])
        ])
    ]

    objs = tokenize("#{(foo bar) (baz quux)}")
    assert objs == [
        Set([
            Expression([Symbol("foo"), Symbol("bar")]),
            Expression([Symbol("baz"), Symbol("quux")])
        ])
    ]

    # Duplicate items in a literal set should be okay (and should
    # be preserved).
    objs = tokenize("#{1 2 1 1 2 1}")
    assert objs == [Set([Integer(n) for n in [1, 2, 1, 1, 2, 1]])]
    assert len(objs[0]) == 6

    # https://github.com/hylang/hy/issues/1120
    objs = tokenize("#{a 1}")
    assert objs == [Set([Symbol("a"), Integer(1)])]
Example #2
def test_discard():
    """Check that discarded terms are removed properly."""
    # empty
    assert tokenize("") == []
    # single
    assert tokenize("#_1") == []
    # multiple
    assert tokenize("#_1 #_2") == []
    assert tokenize("#_1 #_2 #_3") == []
    # nested discard
    assert tokenize("#_ #_1 2") == []
    assert tokenize("#_ #_ #_1 2 3") == []
    # trailing
    assert tokenize("0") == [Integer(0)]
    assert tokenize("0 #_1") == [Integer(0)]
    assert tokenize("0 #_1 #_2") == [Integer(0)]
    # leading
    assert tokenize("2") == [Integer(2)]
    assert tokenize("#_1 2") == [Integer(2)]
    assert tokenize("#_0 #_1 2") == [Integer(2)]
    assert tokenize("#_ #_0 1 2") == [Integer(2)]
    # both
    assert tokenize("#_1 2 #_3") == [Integer(2)]
    assert tokenize("#_0 #_1 2 #_ #_3 4") == [Integer(2)]
    # inside
    assert tokenize("0 #_1 2") == [Integer(0), Integer(2)]
    assert tokenize("0 #_1 #_2 3") == [Integer(0), Integer(3)]
    assert tokenize("0 #_ #_1 2 3") == [Integer(0), Integer(3)]
    # in List
    assert tokenize("[]") == [List([])]
    assert tokenize("[#_1]") == [List([])]
    assert tokenize("[#_1 #_2]") == [List([])]
    assert tokenize("[#_ #_1 2]") == [List([])]
    assert tokenize("[0]") == [List([Integer(0)])]
    assert tokenize("[0 #_1]") == [List([Integer(0)])]
    assert tokenize("[0 #_1 #_2]") == [List([Integer(0)])]
    assert tokenize("[2]") == [List([Integer(2)])]
    assert tokenize("[#_1 2]") == [List([Integer(2)])]
    assert tokenize("[#_0 #_1 2]") == [List([Integer(2)])]
    assert tokenize("[#_ #_0 1 2]") == [List([Integer(2)])]
    # in Set
    assert tokenize("#{}") == [Set()]
    assert tokenize("#{#_1}") == [Set()]
    assert tokenize("#{0 #_1}") == [Set([Integer(0)])]
    assert tokenize("#{#_1 0}") == [Set([Integer(0)])]
    # in Dict
    assert tokenize("{}") == [Dict()]
    assert tokenize("{#_1}") == [Dict()]
    assert tokenize("{#_0 1 2}") == [Dict([Integer(1), Integer(2)])]
    assert tokenize("{1 #_0 2}") == [Dict([Integer(1), Integer(2)])]
    assert tokenize("{1 2 #_0}") == [Dict([Integer(1), Integer(2)])]
    # in Expression
    assert tokenize("()") == [Expression()]
    assert tokenize("(#_foo)") == [Expression()]
    assert tokenize("(#_foo bar)") == [Expression([Symbol("bar")])]
    assert tokenize("(foo #_bar)") == [Expression([Symbol("foo")])]
    assert tokenize("(foo :bar 1)") == [
        Expression([Symbol("foo"), Keyword("bar"),
                    Integer(1)])
    ]
    assert tokenize("(foo #_:bar 1)") == [
        Expression([Symbol("foo"), Integer(1)])
    ]
    assert tokenize("(foo :bar #_1)") == [
        Expression([Symbol("foo"), Keyword("bar")])
    ]
    # discard term with nesting
    assert tokenize("[1 2 #_[a b c [d e [f g] h]] 3 4]") == [
        List([Integer(1), Integer(2),
              Integer(3), Integer(4)])
    ]
    # discard with other prefix syntax
    assert tokenize("a #_'b c") == [Symbol("a"), Symbol("c")]
    assert tokenize("a '#_b c") == [
        Symbol("a"), Expression([Symbol("quote"), Symbol("c")])
    ]
    assert tokenize("a '#_b #_c d") == [
        Symbol("a"), Expression([Symbol("quote"), Symbol("d")])
    ]
    assert tokenize("a '#_ #_b c d") == [
        Symbol("a"), Expression([Symbol("quote"), Symbol("d")])
    ]
Example #3
def empty_set(state, p):
    # Grammar action: an empty set literal produces an empty Set model.
    return Set([])
Example #4
def t_set(state, p):
    # Grammar action: wrap the parsed contents of a set literal in a Set model.
    return Set(p[1])
Example #5
hydict = Dict(["a", 1, "b", 2, "c", 3])


def test_dict_items():
    assert hydict.items() == [("a", 1), ("b", 2), ("c", 3)]


def test_dict_keys():
    assert hydict.keys() == ["a", "b", "c"]


def test_dict_values():
    assert hydict.values() == [1, 2, 3]


hyset = Set([3, 1, 2, 2])


def test_set():
    assert list(hyset) == [3, 1, 2, 2]


def test_number_model_copy():
    i = Integer(42)
    assert (i == copy.copy(i))
    assert (i == copy.deepcopy(i))

    f = Float(42.)
    assert (f == copy.copy(f))
    assert (f == copy.deepcopy(f))
Example #6
def test_set():
    assert list(Set([3, 1, 2, 2])) == [3, 1, 2, 2]