Code example #1
File: test_lex.py  Project: xmonader/hy
def test_dicts():
    """ Ensure that we can tokenize a dict. """
    objs = tokenize("{foo bar bar baz}")
    assert objs == [HyDict(["foo", "bar", "bar", "baz"])]

    objs = tokenize("(bar {foo bar bar baz})")
    assert objs == [HyExpression([HySymbol("bar"),
                                  HyDict(["foo", "bar",
                                          "bar", "baz"])])]

    objs = tokenize("{(foo bar) (baz quux)}")
    assert objs == [HyDict([
        HyExpression([HySymbol("foo"), HySymbol("bar")]),
        HyExpression([HySymbol("baz"), HySymbol("quux")])
    ])]
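
The flat list passed to HyDict above is how Hy models a dict literal: keys and values alternate in one sequence rather than being paired up front. A minimal plain-Python sketch of that layout (no Hy install needed; the pairing matches what test_dict_items asserts in code example #6):

flat = ["foo", "bar", "bar", "baz"]   # contents of {foo bar bar baz}

keys = flat[0::2]                     # even positions are keys
values = flat[1::2]                   # odd positions are values
pairs = list(zip(keys, values))

print(keys)    # ['foo', 'bar']
print(values)  # ['bar', 'baz']
print(pairs)   # [('foo', 'bar'), ('bar', 'baz')]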
Code example #2
File: test_lex.py  Project: shubhampachori12110095/hy
def test_cons_list():
    """Check that cons of something and a list gets tokenized as a list"""
    entry = tokenize("(a . [])")[0]
    assert entry == HyList([HySymbol("a")])
    assert type(entry) == HyList
    entry = tokenize("(a . ())")[0]
    assert entry == HyExpression([HySymbol("a")])
    assert type(entry) == HyExpression
    entry = tokenize("(a b . {})")[0]
    assert entry == HyDict([HySymbol("a"), HySymbol("b")])
    assert type(entry) == HyDict
Code example #3
File: parser.py  Project: vulogov-pulsepoint/hy
def empty_dict(p):
    # Grammar callback for an empty dict literal "{}": there are no forms
    # between the braces, so return an empty HyDict.
    return HyDict([])
Code example #4
File: parser.py  Project: vulogov-pulsepoint/hy
def t_dict(p):
    # Grammar callback for a dict literal "{...}": p[1] holds the flat list
    # of forms parsed between the braces, wrapped directly in a HyDict.
    return HyDict(p[1])
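
The two callbacks above come from Hy's rply-based parser, but the excerpts omit how they are attached to grammar rules. Below is a minimal, self-contained rply sketch of that registration pattern; it is not Hy's actual grammar: the token names, rule strings, the tiny lexer, and the plain-dict return values are illustrative assumptions (only the rply package, which Hy itself depends on, is required).

from rply import LexerGenerator, ParserGenerator

# Tiny lexer: braces plus whitespace-separated symbols.
lg = LexerGenerator()
lg.add("LCURLY", r"\{")
lg.add("RCURLY", r"\}")
lg.add("SYMBOL", r"[^\s{}]+")
lg.ignore(r"\s+")
lexer = lg.build()

pg = ParserGenerator(["LCURLY", "RCURLY", "SYMBOL"])

@pg.production("dict : LCURLY contents RCURLY")
def t_dict(p):
    # p[1] is the flat list of forms between the braces; pair it up into an
    # ordinary dict here (Hy would wrap it in a HyDict instead).
    contents = p[1]
    return dict(zip(contents[0::2], contents[1::2]))

@pg.production("dict : LCURLY RCURLY")
def empty_dict(p):
    # Nothing between the braces.
    return {}

@pg.production("contents : contents SYMBOL")
def contents_many(p):
    return p[0] + [p[1].getstr()]

@pg.production("contents : SYMBOL")
def contents_one(p):
    return [p[0].getstr()]

parser = pg.build()
print(parser.parse(lexer.lex("{foo bar bar baz}")))  # {'foo': 'bar', 'bar': 'baz'}
print(parser.parse(lexer.lex("{}")))                 # {}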
Code example #5
File: test_lex.py  Project: shubhampachori12110095/hy
def test_discard():
    """Check that discarded terms are removed properly."""
    # empty
    assert tokenize("") == []
    # single
    assert tokenize("#_1") == []
    # multiple
    assert tokenize("#_1 #_2") == []
    assert tokenize("#_1 #_2 #_3") == []
    # nested discard
    assert tokenize("#_ #_1 2") == []
    assert tokenize("#_ #_ #_1 2 3") == []
    # trailing
    assert tokenize("0") == [0]
    assert tokenize("0 #_1") == [0]
    assert tokenize("0 #_1 #_2") == [0]
    # leading
    assert tokenize("2") == [2]
    assert tokenize("#_1 2") == [2]
    assert tokenize("#_0 #_1 2") == [2]
    assert tokenize("#_ #_0 1 2") == [2]
    # both
    assert tokenize("#_1 2 #_3") == [2]
    assert tokenize("#_0 #_1 2 #_ #_3 4") == [2]
    # inside
    assert tokenize("0 #_1 2") == [0, 2]
    assert tokenize("0 #_1 #_2 3") == [0, 3]
    assert tokenize("0 #_ #_1 2 3") == [0, 3]
    # in HyList
    assert tokenize("[]") == [HyList([])]
    assert tokenize("[#_1]") == [HyList([])]
    assert tokenize("[#_1 #_2]") == [HyList([])]
    assert tokenize("[#_ #_1 2]") == [HyList([])]
    assert tokenize("[0]") == [HyList([HyInteger(0)])]
    assert tokenize("[0 #_1]") == [HyList([HyInteger(0)])]
    assert tokenize("[0 #_1 #_2]") == [HyList([HyInteger(0)])]
    assert tokenize("[2]") == [HyList([HyInteger(2)])]
    assert tokenize("[#_1 2]") == [HyList([HyInteger(2)])]
    assert tokenize("[#_0 #_1 2]") == [HyList([HyInteger(2)])]
    assert tokenize("[#_ #_0 1 2]") == [HyList([HyInteger(2)])]
    # in HySet
    assert tokenize("#{}") == [HySet()]
    assert tokenize("#{#_1}") == [HySet()]
    assert tokenize("#{0 #_1}") == [HySet([HyInteger(0)])]
    assert tokenize("#{#_1 0}") == [HySet([HyInteger(0)])]
    # in HyDict
    assert tokenize("{}") == [HyDict()]
    assert tokenize("{#_1}") == [HyDict()]
    assert tokenize("{#_0 1 2}") == [HyDict([HyInteger(1), HyInteger(2)])]
    assert tokenize("{1 #_0 2}") == [HyDict([HyInteger(1), HyInteger(2)])]
    assert tokenize("{1 2 #_0}") == [HyDict([HyInteger(1), HyInteger(2)])]
    # in HyExpression
    assert tokenize("()") == [HyExpression()]
    assert tokenize("(#_foo)") == [HyExpression()]
    assert tokenize("(#_foo bar)") == [HyExpression([HySymbol("bar")])]
    assert tokenize("(foo #_bar)") == [HyExpression([HySymbol("foo")])]
    assert tokenize("(foo :bar 1)") == [
        HyExpression([HySymbol("foo"),
                      HyKeyword(":bar"),
                      HyInteger(1)])
    ]
    assert tokenize("(foo #_:bar 1)") == [
        HyExpression([HySymbol("foo"), HyInteger(1)])
    ]
    assert tokenize("(foo :bar #_1)") == [
        HyExpression([HySymbol("foo"), HyKeyword(":bar")])
    ]
    # discard term with nesting
    assert tokenize("[1 2 #_[a b c [d e [f g] h]] 3 4]") == [
        HyList([HyInteger(1),
                HyInteger(2),
                HyInteger(3),
                HyInteger(4)])
    ]
    # discard with other prefix syntax
    assert tokenize("a #_'b c") == [HySymbol("a"), HySymbol("c")]
    assert tokenize("a '#_b c") == [
        HySymbol("a"),
        HyExpression([HySymbol("quote"), HySymbol("c")])
    ]
    assert tokenize("a '#_b #_c d") == [
        HySymbol("a"),
        HyExpression([HySymbol("quote"), HySymbol("d")])
    ]
    assert tokenize("a '#_ #_b c d") == [
        HySymbol("a"),
        HyExpression([HySymbol("quote"), HySymbol("d")])
    ]
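
The discard rule this test exercises is: #_ drops the next complete form, and markers stack, so "#_ #_1 2" drops both 1 and 2. A rough plain-Python sketch of that rule over a flat list of already-read forms (this is not Hy's lexer, and it ignores nesting inside brackets and prefixes like quote):

def strip_discards(forms):
    # Drop every "#_" marker together with the form(s) it discards.
    out = []
    i = 0
    while i < len(forms):
        if forms[i] == "#_":
            # Count consecutive markers, then skip that many following forms.
            n = 0
            while i < len(forms) and forms[i] == "#_":
                n += 1
                i += 1
            i += n
        else:
            out.append(forms[i])
            i += 1
    return out

print(strip_discards(["#_", "#_", "1", "2", "3"]))  # ['3']
print(strip_discards(["0", "#_", "1", "2"]))        # ['0', '2']
print(strip_discards(["#_", "1", "2", "#_", "3"]))  # ['2']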
Code example #6
File: test_models.py  Project: zequequiel/hy


def test_list_slice():
    """Check that slicing a HyList produces a HyList"""
    a = HyList([1, 2, 3, 4])
    sl1 = a[1:]
    sl5 = a[5:]

    assert type(sl1) == HyList
    assert sl1 == HyList([2, 3, 4])
    assert type(sl5) == HyList
    assert sl5 == HyList([])


hydict = HyDict(["a", 1, "b", 2, "c", 3])


def test_dict_items():
    assert hydict.items() == [("a", 1), ("b", 2), ("c", 3)]


def test_dict_keys():
    assert hydict.keys() == ["a", "b", "c"]


def test_dict_values():
    assert hydict.values() == [1, 2, 3]


Code example #7
File: parser.py  Project: novolei/hy
def empty_dict(state, p):
    # Empty-dict callback as in example #3; this variant's grammar also
    # passes a parser `state` argument, which is unused here.
    return HyDict([])
Code example #8
File: parser.py  Project: novolei/hy
def t_dict(state, p):
    # Dict-literal callback with the extra `state` argument; p[1] is still
    # the flat list of brace contents, wrapped in a HyDict.
    return HyDict(p[1])