Example #1
File: test_lex.py Project: kartikm/hy
def test_escapes():
    """ Ensure we can escape things """
    entry = tokenize(r"""(foo "foo\n")""")[0]
    assert entry[1] == "foo\n"

    entry = tokenize(r"""(foo r"foo\s")""")[0]
    assert entry[1] == r"foo\s"
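For orientation, tokenize returns a list of Hy model objects: entry above is a HyExpression and entry[1] a HyString whose escapes have already been decoded. A minimal stand-alone sketch, assuming the hy.lex import path used by test_lex.py (it varies across Hy versions):

from hy.lex import tokenize

entry = tokenize(r'(foo "foo\n")')[0]
print(type(entry).__name__)     # HyExpression (a list subclass)
print(type(entry[1]).__name__)  # HyString (a str subclass)
print(repr(str(entry[1])))      # 'foo\n' -- the \n escape was decoded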
Example #2
def test_escapes():
    """ Ensure we can escape things """
    entry = tokenize("(foo \"foo\\n\")")[0]
    assert entry[1] == "foo\n"

    entry = tokenize("(foo \"foo\s\")")[0]
    assert entry[1] == "foo\\s"
Example #3
File: test_lex.py Project: kartikm/hy
def test_complex():
    """Ensure we tokenize complex numbers properly"""
    # This is a regression test for #143
    entry = tokenize("(1j)")[0][0]
    assert entry == HyComplex("1.0j")
    entry = tokenize("(j)")[0][0]
    assert entry == HySymbol("j")
Example #4
File: test_lex.py Project: eal/hy
def test_lex_expression_complex():
    """ Make sure expressions can produce complex """
    objs = tokenize("(foo 2.j)")
    assert objs == [HyExpression([HySymbol("foo"), HyComplex(2.j)])]
    objs = tokenize("(foo -0.5j)")
    assert objs == [HyExpression([HySymbol("foo"), HyComplex(-0.5j)])]
    objs = tokenize("(foo 1.e7j)")
    assert objs == [HyExpression([HySymbol("foo"), HyComplex(1.e7j)])]
Example #5
File: test_lex.py Project: eal/hy
def test_lex_expression_float():
    """ Make sure expressions can produce floats """
    objs = tokenize("(foo 2.)")
    assert objs == [HyExpression([HySymbol("foo"), HyFloat(2.)])]
    objs = tokenize("(foo -0.5)")
    assert objs == [HyExpression([HySymbol("foo"), HyFloat(-0.5)])]
    objs = tokenize("(foo 1.e7)")
    assert objs == [HyExpression([HySymbol("foo"), HyFloat(1.e7)])]
Example #6
File: test_lex.py Project: MacTop/hy
def test_lex_mangling_star():
    """Ensure that mangling starred identifiers works according to plan"""
    entry = tokenize("*foo*")
    assert entry == [HySymbol("FOO")]
    entry = tokenize("*")
    assert entry == [HySymbol("*")]
    entry = tokenize("*foo")
    assert entry == [HySymbol("*foo")]
Example #7
File: test_lex.py Project: Tritlo/hy
def test_lex_single_quote_err():
    "Ensure tokenizing \"' \" throws a LexException that can be stringified"
    # https://github.com/hylang/hy/issues/1252
    try:
        tokenize("' ")
    except LexException as e:
        assert "Could not identify the next token" in str(e)
    else:
        assert False
Example #8
File: test_lex.py Project: kartikm/hy
def test_lex_bracket_strings():

    objs = tokenize("#[my delim[hello world]my delim]")
    assert objs == [HyString("hello world")]
    assert objs[0].brackets == "my delim"

    objs = tokenize("#[[squid]]")
    assert objs == [HyString("squid")]
    assert objs[0].brackets == ""
Example #9
File: test_lex.py Project: kartikm/hy
def test_lex_strings():
    """ Make sure that strings are valid expressions"""
    objs = tokenize('"foo"')
    assert objs == [HyString("foo")]
    # Make sure backslash-escaped newlines work (see issue #831)
    objs = tokenize(r"""
"a\
bc"
""")
    assert objs == [HyString("abc")]
Example #10
File: test_lex.py Project: eal/hy
def test_escapes():
    """ Ensure we can escape things """
    entry = tokenize("(foo \"foo\\n\")")[0]
    assert entry[1] == "foo\n"

    try:
        entry = tokenize("(foo \"foo\s\")")[0]
        assert True is False
    except LexException:
        pass
Example #11
File: test_lex.py Project: MacTop/hy
def test_cons_list():
    """Check that cons of something and a list gets tokenized as a list"""
    entry = tokenize("(a . [])")[0]
    assert entry == HyList([HySymbol("a")])
    assert type(entry) == HyList
    entry = tokenize("(a . ())")[0]
    assert entry == HyExpression([HySymbol("a")])
    assert type(entry) == HyExpression
    entry = tokenize("(a b . {})")[0]
    assert entry == HyDict([HySymbol("a"), HySymbol("b")])
    assert type(entry) == HyDict
Example #12
def test_preprocessor_expression():
    """ Test inner macro expantion """
    obj = process(tokenize('(test (test "one" "two"))')[0])

    assert type(obj) == HyList
    assert type(obj[0]) == HyList

    assert obj[0] == HyList([HyString("one"), HyString("two")])

    obj = HyList([HyString("one"), HyString("two")])
    obj = tokenize('(shill ["one" "two"])')[0][1]
    assert obj == process(obj)
Example #13
def test_preprocessor_expression():
    """ Test that macro expansion doesn't recurse"""
    obj = macroexpand(tokenize('(test (test "one" "two"))')[0], HyASTCompiler(__name__))

    assert type(obj) == HyList
    assert type(obj[0]) == HyExpression

    assert obj[0] == HyExpression([HySymbol("test"), HyString("one"), HyString("two")])

    obj = HyList([HyString("one"), HyString("two")])
    obj = tokenize('(shill ["one" "two"])')[0][1]
    assert obj == macroexpand(obj, HyASTCompiler(""))
Example #14
File: test_lex.py Project: eal/hy
def test_dicts():
    """ Ensure that we can tokenize a dict. """
    objs = tokenize("{foo bar bar baz}")
    assert objs == [HyDict({
        "foo": "bar",
        "bar": "baz"
    })]

    objs = tokenize("(bar {foo bar bar baz})")
    assert objs == [HyExpression([HySymbol("bar"),
                                  HyDict({"foo": "bar",
                                          "bar": "baz"})])]
Example #15
File: test_lex.py Project: ALSchwalm/hy
def test_sets():
    """ Ensure that we can tokenize a set. """
    objs = tokenize("#{1 2}")
    assert objs == [HySet([HyInteger(1), HyInteger(2)])]
    objs = tokenize("(bar #{foo bar baz})")
    assert objs == [HyExpression([HySymbol("bar"),
                                  HySet(["foo", "bar", "baz"])])]

    objs = tokenize("#{(foo bar) (baz quux)}")
    assert objs == [HySet([
        HyExpression([HySymbol("foo"), HySymbol("bar")]),
        HyExpression([HySymbol("baz"), HySymbol("quux")])
    ])]
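The second assertion compares a HySet built from plain strings against tokenized symbols; it holds because Hy's model classes subclass the matching Python types (HySymbol and HyString are str subclasses). A one-line illustration (the import path is an assumption):

from hy.models import HySymbol

assert HySymbol("foo") == "foo"  # symbols compare equal to plain str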
Example #16
File: test_lex.py Project: eal/hy
def test_lex_exception():
    """ Ensure tokenize throws a fit on a partial input """
    try:
        tokenize("(foo")
        assert True is False
    except LexException:
        pass

    try:
        tokenize("&foo&")
        assert True is False
    except LexException:
        pass
Example #17
File: test_lex.py Project: eal/hy
def test_unbalanced_exception():
    """Ensure the tokenization fails on unbalanced expressions"""
    try:
        tokenize("(bar))")
        assert True is False
    except LexException:
        pass

    try:
        tokenize("(baz [quux]])")
        assert True is False
    except LexException:
        pass
Example #18
File: test_lex.py Project: kartikm/hy
def test_lex_nan_and_inf():

    assert isnan(tokenize("NaN")[0])
    assert tokenize("Nan") == [HySymbol("Nan")]
    assert tokenize("nan") == [HySymbol("nan")]
    assert tokenize("NAN") == [HySymbol("NAN")]

    assert tokenize("Inf") == [HyFloat(float("inf"))]
    assert tokenize("inf") == [HySymbol("inf")]
    assert tokenize("INF") == [HySymbol("INF")]

    assert tokenize("-Inf") == [HyFloat(float("-inf"))]
    assert tokenize("-inf") == [HySymbol("-inf")]
    assert tokenize("-INF") == [HySymbol("-INF")]
Example #19
File: test_ast.py Project: jd/hy
def test_ast_good_try():
    "Make sure AST can compile valid try"
    hy_compile(tokenize("(try)"))
    hy_compile(tokenize("(try 1)"))
    hy_compile(tokenize("(try 1 (except) (else 1))"))
    hy_compile(tokenize("(try 1 (else 1) (except))"))
    hy_compile(tokenize("(try 1 (finally 1) (except))"))
    hy_compile(tokenize("(try 1 (finally 1))"))
    hy_compile(tokenize("(try 1 (except) (finally 1))"))
    hy_compile(tokenize("(try 1 (except) (finally 1) (else 1))"))
    hy_compile(tokenize("(try 1 (except) (else 1) (finally 1))"))
Example #20
File: test_lex.py Project: kartikm/hy
def test_dicts():
    """ Ensure that we can tokenize a dict. """
    objs = tokenize("{foo bar bar baz}")
    assert objs == [HyDict(["foo", "bar", "bar", "baz"])]

    objs = tokenize("(bar {foo bar bar baz})")
    assert objs == [HyExpression([HySymbol("bar"),
                                  HyDict(["foo", "bar",
                                          "bar", "baz"])])]

    objs = tokenize("{(foo bar) (baz quux)}")
    assert objs == [HyDict([
        HyExpression([HySymbol("foo"), HySymbol("bar")]),
        HyExpression([HySymbol("baz"), HySymbol("quux")])
    ])]
Example #21
File: test_lex.py Project: kartikm/hy
def test_unicode_escapes():
    """Ensure unicode escapes are handled correctly"""
    s = r'"a\xac\u1234\u20ac\U00008000"'
    assert len(s) == 29
    entry = tokenize(s)[0]
    assert len(entry) == 5
    assert [ord(x) for x in entry] == [97, 172, 4660, 8364, 32768]
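The two length checks count different things: len(s) counts the 29 characters of raw source text, while len(entry) counts the 5 decoded codepoints. The arithmetic, spelled out in plain Python:

s = r'"a\xac\u1234\u20ac\U00008000"'
assert len(s) == 2 + 1 + 4 + 6 + 6 + 10  # quotes + 'a' + four escapes
decoded = "a\xac\u1234\u20ac\U00008000"
assert len(decoded) == 5
assert [ord(c) for c in decoded] == [97, 172, 4660, 8364, 32768]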
Example #22
File: test_ast.py Project: jd/hy
def test_ast_print():
    code = hy_compile(tokenize("(print \"foo\")")).body[0]

    if sys.version_info[0] >= 3:
        assert type(code.value) == ast.Call
        return
    assert type(code) == ast.Print
Example #23
File: repl.py Project: copyninja/HyREPL
    def eval(self, code):
        """Evals the given code.
           Returns a dict with reponses."""
        self.ret = []

        # We might catch errors when tokenizing code.
        try:
            tokens = tokenize(code)
        except:
            self._format_excp(sys.exc_info())
            return self.ret

        # Redirect stdout so we can capture anything printed and return it.
        oldout = sys.stdout
        oldin = sys.stdin
        stdout = None
        sys.stdout = StringIO()

        for i in tokens:
            try:
                p = str(hy_eval(i, self.mod.__dict__, "__main__"))
            except:
                self._format_excp(sys.exc_info())
            else:
                self.ret.append({"value": p, "ns": 'None'})
                # If the expression returned nothing, check whether anything was printed.
                if p == "None":
                    stdout = sys.stdout.getvalue()
                    if stdout:
                        self.ret.append({'out': stdout})
        
        sys.stdout = oldout
        return self.ret
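Stripped of the REPL machinery, the stdout swap above is the standard capture pattern; a pure-Python sketch (with the try/finally the original omits, so the real stdout is restored even on error):

import sys
from io import StringIO

oldout = sys.stdout
sys.stdout = StringIO()
try:
    print("captured")                  # lands in the StringIO buffer
    output = sys.stdout.getvalue()
finally:
    sys.stdout = oldout                # always restore the real stdout

assert output == "captured\n"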
Example #24
def test_preprocessor_simple():
    """ Test basic macro expansion """
    obj = macroexpand(tokenize('(test "one" "two")')[0],
                      __name__,
                      HyASTCompiler(__name__))
    assert obj == HyList(["one", "two"])
    assert type(obj) == HyList
Example #25
File: test_ast.py Project: rogererens/hy
def cant_compile(expr):
    expr = tokenize(expr)
    try:
        hy_compile(expr)
        assert False
    except HyCompileError:
        pass
Example #26
 def hylang(self, line, cell=None, filename='<input>', symbol='single'):
     """ IPython magic for running Hy code in IPython.
     Use %hylang for one line of code or
         %%hylang for a block or cell.
         Note that we pass the AST directly to IPython."""
     global SIMPLE_TRACEBACKS
     source = cell if cell else line
     try:
         tokens = tokenize(source)
     except PrematureEndOfInput:
         print("Premature End of Input")
         return
     except LexException as e:
         if e.source is None:
             e.source = source
             e.filename = filename
         print(str(e))
         return
     try:
         _ast = hy_compile(tokens, "__console__", root=ast.Interactive)
         self.shell.run_ast_nodes(_ast.body, '<input>', compiler=ast_compile)
     except HyTypeError as e:
         if e.source is None:
             e.source = source
             e.filename = filename
         if SIMPLE_TRACEBACKS:
             print(str(e))
         else:
             self.shell.showtraceback()
     except Exception:
         self.shell.showtraceback()
Example #27
def test_tag_macro_error():
    """Check if we get correct error with wrong dispatch character"""
    try:
        macroexpand(tokenize("(dispatch_tag_macro '- '())")[0],
                    HyASTCompiler(__name__))
    except HyTypeError as e:
        assert "with the character `-`" in str(e)
Example #28
File: cmdline.py Project: khinsen/hy
    def runsource(self, source, filename="<input>", symbol="single"):
        global SIMPLE_TRACEBACKS
        try:
            tokens = tokenize(source)
        except PrematureEndOfInput:
            return True
        except LexException as e:
            if e.source is None:
                e.source = source
                e.filename = filename
            sys.stderr.write(str(e))
            return False

        try:
            _ast = hy_compile(tokens, "__console__", root=ast.Interactive)
            if self.spy:
                print_python_code(_ast)
            code = ast_compile(_ast, filename, symbol)
        except HyTypeError as e:
            if e.source is None:
                e.source = source
                e.filename = filename
            if SIMPLE_TRACEBACKS:
                sys.stderr.write(str(e))
            else:
                self.showtraceback()
            return False
        except Exception:
            self.showtraceback()
            return False

        self.runcode(code)
        return False
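runsource follows the code.InteractiveConsole contract: returning True asks the REPL for more input (here triggered by PrematureEndOfInput), while False means the source was consumed. Python's codeop module exposes the same incomplete-input test, which shows the contract in isolation:

import codeop

def needs_more(source):
    # compile_command returns None for syntactically incomplete input,
    # the stdlib analogue of catching PrematureEndOfInput above.
    return codeop.compile_command(source) is None

assert needs_more("def f():")   # incomplete: keep reading
assert not needs_more("1 + 1")  # complete: execute it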
Example #29
File: test_lex.py Project: MacTop/hy
def test_dotted_list():
    """Check that dotted lists get tokenized correctly"""
    entry = tokenize("(a b c . (d . e))")[0]
    assert entry == HyCons(HySymbol("a"),
                           HyCons(HySymbol("b"),
                                  HyCons(HySymbol("c"),
                                         HyCons(HySymbol("d"),
                                                HySymbol("e")))))
Example #30
def test_preprocessor_exceptions():
    """ Test that macro expansion raises appropriate exceptions"""
    try:
        macroexpand(tokenize('(defn)')[0], __name__)
        assert False
    except HyMacroExpansionError as e:
        assert "_hy_anon_fn_" not in str(e)
        assert "TypeError" not in str(e)
Example #31
def test_preprocessor_simple():
    """Test basic macro expansion"""
    obj = macroexpand(
        tokenize('(test "one" "two")')[0], __name__, HyASTCompiler(__name__))
    assert obj == List([String("one"), String("two")])
    assert type(obj) == List
Example #32
File: importer.py Project: mtmiller/hy
def import_buffer_to_hst(buf):
    """Import content from buf and return an Hy AST."""
    return tokenize(buf + "\n")
Example #33
    def t(x): return tokenize("(foo {})".format(x))

    def f(x): return [HyExpression([HySymbol("foo"), x])]
Example #34
File: test_lex.py Project: boguter/hy
def test_lex_expression_integer():
    """ Make sure expressions can produce integers """
    objs = tokenize("(foo 2)")
    assert objs == [HyExpression([HySymbol("foo"), HyInteger(2)])]
Example #35
File: test_lex.py Project: boguter/hy
def test_lex_expression_symbols():
    """ Make sure that expressions produce symbols """
    objs = tokenize("(foo bar)")
    assert objs == [HyExpression([HySymbol("foo"), HySymbol("bar")])]
Example #36
File: test_lex.py Project: boguter/hy
def test_lex_mangling_hyphen():
    """Ensure that hyphens get translated to underscores during mangling"""
    entry = tokenize("foo-bar")
    assert entry == [HySymbol("foo_bar")]
    entry = tokenize("-")
    assert entry == [HySymbol("-")]
Example #37
File: test_lex.py Project: boguter/hy
def test_reader_macro():
    """Ensure reader macros are handles properly"""
    entry = tokenize("#^()")
    assert entry[0][0] == HySymbol("dispatch_reader_macro")
    assert entry[0][1] == HyString("^")
    assert len(entry[0]) == 3
Example #38
File: test_ast.py Project: zenhack/hy
def can_compile(expr):
    return hy_compile(tokenize(expr), "__main__")
Example #39
File: test_lex.py Project: giruzou/hy
def test_lex_big_float():
    # https://github.com/hylang/hy/issues/1448
    assert tokenize("1e900") == [HyFloat(1e900)]
    assert tokenize("1e900-1e900j") == [HyComplex(1e900, -1e900)]
Example #40
File: test_lex.py Project: zenhack/hy
def test_lex_strings():
    """ Make sure that strings are valid expressions"""
    objs = tokenize("\"foo\" ")
    assert objs == [HyString("foo")]
Example #41
File: test_ast.py Project: munk/hy
 def f(x):
     return hy_compile(tokenize(x), "__main__").body[0].value.s
Example #42
File: test_ast.py Project: munk/hy
 def contains_import_from(code):
     return any([
         isinstance(node, ast.ImportFrom)
         for node in hy_compile(tokenize(code), "__main__").body
     ])
Example #43
def test_tag_macro():
    """Ensure tag macros are handled properly"""
    entry = tokenize("#^()")
    assert entry[0][0] == HySymbol("dispatch-tag-macro")
    assert entry[0][1] == HyString("^")
    assert len(entry[0]) == 3
Example #44
File: test_lex.py Project: giruzou/hy
def test_lex_bad_attrs():
    with lexe():
        tokenize("1.foo")
    with lexe():
        tokenize("0.foo")
    with lexe():
        tokenize("1.5.foo")
    with lexe():
        tokenize("1e3.foo")
    with lexe():
        tokenize("5j.foo")
    with lexe():
        tokenize("3+5j.foo")
    with lexe():
        tokenize("3.1+5.1j.foo")
    assert tokenize("j.foo")
    with lexe():
        tokenize("3/4.foo")
    assert tokenize("a/1.foo")
    assert tokenize("1/a.foo")
    with lexe():
        tokenize(":hello.foo")
Example #45
File: test_lex.py Project: boguter/hy
def test_hashbang():
    """ Ensure we can escape things """
    entry = tokenize("#!this is a comment\n")
    assert entry == []
Example #46
File: test_lex.py Project: giruzou/hy
def test_unbalanced_exception():
    """Ensure the tokenization fails on unbalanced expressions"""
    with lexe():
        tokenize("(bar))")
    with lexe():
        tokenize("(baz [quux]])")
Example #47
File: test_lex.py Project: boguter/hy
def test_lex_comment_382():
    """Ensure that we can tokenize sources with a comment at the end"""
    entry = tokenize("foo ;bar\n;baz")
    assert entry == [HySymbol("foo")]
Example #48
File: test_lex.py Project: giruzou/hy
def test_lex_single_quote_err():
    "Ensure tokenizing \"' \" throws a LexException that can be stringified"
    # https://github.com/hylang/hy/issues/1252
    with lexe() as e:
        tokenize("' ")
    assert "Could not identify the next token" in str(e.value)
Example #49
File: test_lex.py Project: boguter/hy
def test_simple_cons():
    """Check that cons gets tokenized correctly"""
    entry = tokenize("(a . b)")[0]
    assert entry == HyCons(HySymbol("a"), HySymbol("b"))
Example #50
File: test_lex.py Project: giruzou/hy
def test_discard():
    """Check that discarded terms are removed properly."""
    # empty
    assert tokenize("") == []
    # single
    assert tokenize("#_1") == []
    # multiple
    assert tokenize("#_1 #_2") == []
    assert tokenize("#_1 #_2 #_3") == []
    # nested discard
    assert tokenize("#_ #_1 2") == []
    assert tokenize("#_ #_ #_1 2 3") == []
    # trailing
    assert tokenize("0") == [0]
    assert tokenize("0 #_1") == [0]
    assert tokenize("0 #_1 #_2") == [0]
    # leading
    assert tokenize("2") == [2]
    assert tokenize("#_1 2") == [2]
    assert tokenize("#_0 #_1 2") == [2]
    assert tokenize("#_ #_0 1 2") == [2]
    # both
    assert tokenize("#_1 2 #_3") == [2]
    assert tokenize("#_0 #_1 2 #_ #_3 4") == [2]
    # inside
    assert tokenize("0 #_1 2") == [0, 2]
    assert tokenize("0 #_1 #_2 3") == [0, 3]
    assert tokenize("0 #_ #_1 2 3") == [0, 3]
    # in HyList
    assert tokenize("[]") == [HyList([])]
    assert tokenize("[#_1]") == [HyList([])]
    assert tokenize("[#_1 #_2]") == [HyList([])]
    assert tokenize("[#_ #_1 2]") == [HyList([])]
    assert tokenize("[0]") == [HyList([HyInteger(0)])]
    assert tokenize("[0 #_1]") == [HyList([HyInteger(0)])]
    assert tokenize("[0 #_1 #_2]") == [HyList([HyInteger(0)])]
    assert tokenize("[2]") == [HyList([HyInteger(2)])]
    assert tokenize("[#_1 2]") == [HyList([HyInteger(2)])]
    assert tokenize("[#_0 #_1 2]") == [HyList([HyInteger(2)])]
    assert tokenize("[#_ #_0 1 2]") == [HyList([HyInteger(2)])]
    # in HySet
    assert tokenize("#{}") == [HySet()]
    assert tokenize("#{#_1}") == [HySet()]
    assert tokenize("#{0 #_1}") == [HySet([HyInteger(0)])]
    assert tokenize("#{#_1 0}") == [HySet([HyInteger(0)])]
    # in HyDict
    assert tokenize("{}") == [HyDict()]
    assert tokenize("{#_1}") == [HyDict()]
    assert tokenize("{#_0 1 2}") == [HyDict([HyInteger(1), HyInteger(2)])]
    assert tokenize("{1 #_0 2}") == [HyDict([HyInteger(1), HyInteger(2)])]
    assert tokenize("{1 2 #_0}") == [HyDict([HyInteger(1), HyInteger(2)])]
    # in HyExpression
    assert tokenize("()") == [HyExpression()]
    assert tokenize("(#_foo)") == [HyExpression()]
    assert tokenize("(#_foo bar)") == [HyExpression([HySymbol("bar")])]
    assert tokenize("(foo #_bar)") == [HyExpression([HySymbol("foo")])]
    assert tokenize("(foo :bar 1)") == [
        HyExpression([HySymbol("foo"),
                      HyKeyword(":bar"),
                      HyInteger(1)])
    ]
    assert tokenize("(foo #_:bar 1)") == [
        HyExpression([HySymbol("foo"), HyInteger(1)])
    ]
    assert tokenize("(foo :bar #_1)") == [
        HyExpression([HySymbol("foo"), HyKeyword(":bar")])
    ]
    # discard term with nesting
    assert tokenize("[1 2 #_[a b c [d e [f g] h]] 3 4]") == [
        HyList([HyInteger(1),
                HyInteger(2),
                HyInteger(3),
                HyInteger(4)])
    ]
    # discard with other prefix syntax
    assert tokenize("a #_'b c") == [HySymbol("a"), HySymbol("c")]
    assert tokenize("a '#_b c") == [
        HySymbol("a"),
        HyExpression([HySymbol("quote"), HySymbol("c")])
    ]
    assert tokenize("a '#_b #_c d") == [
        HySymbol("a"),
        HyExpression([HySymbol("quote"), HySymbol("d")])
    ]
    assert tokenize("a '#_ #_b c d") == [
        HySymbol("a"),
        HyExpression([HySymbol("quote"), HySymbol("d")])
    ]
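#_ is the Clojure-style reader discard: the following form is fully parsed (so it must be well-formed) and then dropped before compilation. A hedged stand-alone spot check, with imports assuming the flat hy.models layout used above:

from hy.lex import tokenize
from hy.models import HyList, HyInteger

assert tokenize("[1 #_(nested [forms]) 3]") == [
    HyList([HyInteger(1), HyInteger(3)])
]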
Example #51
File: test_lex.py Project: boguter/hy
def test_lex_expression_strings():
    """ Test that expressions can produce strings """
    objs = tokenize("(foo \"bar\")")
    assert objs == [HyExpression([HySymbol("foo"), HyString("bar")])]
Example #52
File: test_lex.py Project: giruzou/hy
def test_lex_symbols():
    """ Make sure that symbols are valid expressions"""
    objs = tokenize("foo ")
    assert objs == [HySymbol("foo")]
Example #53
def test_lex_bad_attrs():
    with lexe() as execinfo:
        tokenize("1.foo")
    check_ex(execinfo, [
        '  File "<string>", line 1\n',
        '    1.foo\n',
        '    ^\n',
        'LexException: Cannot access attribute on anything other'
            ' than a name (in order to get attributes of expressions,'
            ' use `(. <expression> <attr>)` or `(.<attr> <expression>)`)\n'])

    with lexe(): tokenize("0.foo")
    with lexe(): tokenize("1.5.foo")
    with lexe(): tokenize("1e3.foo")
    with lexe(): tokenize("5j.foo")
    with lexe(): tokenize("3+5j.foo")
    with lexe(): tokenize("3.1+5.1j.foo")
    assert tokenize("j.foo")
    with lexe(): tokenize("3/4.foo")
    assert tokenize("a/1.foo")
    assert tokenize("1/a.foo")
    with lexe(): tokenize(":hello.foo")
Example #54
def test_preprocessor_simple():
    """ Test basic macro expansion """
    obj = macroexpand(tokenize('(test "one" "two")')[0], __name__)
    assert obj == HyList(["one", "two"])
    assert type(obj) == HyList
Example #55
def test_lex_fractions():
    """ Make sure that fractions are valid expressions"""
    objs = tokenize("1/2")
    assert objs == [HyExpression([HySymbol("fraction"), HyInteger(1),
                                  HyInteger(2)])]
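The lexer rewrites 1/2 into a (fraction 1 2) call rather than a numeric literal; at runtime that form presumably constructs a fractions.Fraction (an assumption about the runtime, since the test only inspects the token tree):

from fractions import Fraction

# The value a (fraction 1 2) form would yield at runtime.
assert Fraction(1, 2) == Fraction("1/2")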
Example #56
File: test_lex.py Project: boguter/hy
def test_lex_integers():
    """ Make sure that integers are valid expressions"""
    objs = tokenize("42 ")
    assert objs == [HyInteger(42)]
Example #57
def test_preprocessor_exceptions():
    """Test that macro expansion raises appropriate exceptions"""
    with pytest.raises(HyMacroExpansionError) as excinfo:
        macroexpand(tokenize("(when)")[0], __name__, HyASTCompiler(__name__))
    assert "_hy_anon_" not in excinfo.value.msg
Example #58
File: test_lex.py Project: boguter/hy
def test_lex_digit_separators():

    assert tokenize("1_000_000") == [HyInteger(1000000)]
    assert tokenize("1,000,000") == [HyInteger(1000000)]
    assert tokenize("1,000_000") == [HyInteger(1000000)]
    assert tokenize("1_000,000") == [HyInteger(1000000)]

    assert tokenize("0x_af") == [HyInteger(0xaf)]
    assert tokenize("0x,af") == [HyInteger(0xaf)]
    assert tokenize("0b_010") == [HyInteger(0b010)]
    assert tokenize("0b,010") == [HyInteger(0b010)]
    assert tokenize("0o_373") == [HyInteger(0o373)]
    assert tokenize("0o,373") == [HyInteger(0o373)]

    assert tokenize('1_2.3,4') == [HyFloat(12.34)]
    assert tokenize('1_2e3,4') == [HyFloat(12e34)]
    assert (tokenize("1,2/3_4") == [
        HyExpression([HySymbol("fraction"),
                      HyInteger(12),
                      HyInteger(34)])
    ])
    assert tokenize("1,0_00j") == [HyComplex(1000j)]

    assert tokenize(",,,,___,__1__,,__,,2__,,,__") == [HyInteger(12)]
    assert (tokenize(",,,,___,__1__,,__,,2__,q,__") == [
        HySymbol(",,,,___,__1__,,__,,2__,q,__")
    ])
Example #59
File: test_lex.py Project: giruzou/hy
 def t(x):
     return tokenize("(foo {})".format(x))
Example #60
def test_lex_exception():
    """ Ensure tokenize throws a fit on a partial input """
    with peoi(): tokenize("(foo")
    with peoi(): tokenize("{foo bar")
    with peoi(): tokenize("(defn foo [bar]")
    with peoi(): tokenize("(foo \"bar")