Code Example #1
def test_lex_digit_separators():

    assert tokenize("1_000_000") == [Integer(1000000)]
    assert tokenize("1,000,000") == [Integer(1000000)]
    assert tokenize("1,000_000") == [Integer(1000000)]
    assert tokenize("1_000,000") == [Integer(1000000)]

    assert tokenize("0x_af") == [Integer(0xaf)]
    assert tokenize("0x,af") == [Integer(0xaf)]
    assert tokenize("0b_010") == [Integer(0b010)]
    assert tokenize("0b,010") == [Integer(0b010)]
    assert tokenize("0o_373") == [Integer(0o373)]
    assert tokenize("0o,373") == [Integer(0o373)]

    assert tokenize('1_2.3,4') == [Float(12.34)]
    assert tokenize('1_2e3,4') == [Float(12e34)]
    assert (tokenize("1,2/3_4") == [
        Expression([Symbol("hy._Fraction"),
                    Integer(12),
                    Integer(34)])
    ])
    assert tokenize("1,0_00j") == [Complex(1000j)]

    assert tokenize("1,,,,___,____,,__,,2__,,,__") == [Integer(12)]
    assert (tokenize("_1,,,,___,____,,__,,2__,,,__") == [
        Symbol("_1,,,,___,____,,__,,2__,,,__")
    ])
    assert (tokenize("1,,,,___,____,,__,,2__,q,__") == [
        Symbol("1,,,,___,____,,__,,2__,q,__")
    ])
Code Example #2
def test_compiler_yield_return():
    """
    Check that the compiler correctly generates return statements for
    a generator function. In Python versions prior to 3.3, the return
    statement in a generator can't take a value, so the final expression
    should not generate a return statement. From 3.3 onwards a return
    value should be generated.
    """
    e = make_expression(Symbol("fn"), List(),
                        Expression([Symbol("yield"),
                                    Integer(2)]),
                        Expression([Symbol("+"),
                                    Integer(1),
                                    Integer(1)]))
    ret = compiler.HyASTCompiler(types.ModuleType('test')).compile_atom(e)

    assert len(ret.stmts) == 1
    stmt, = ret.stmts
    assert isinstance(stmt, ast.FunctionDef)
    body = stmt.body
    assert len(body) == 2
    assert isinstance(body[0], ast.Expr)
    assert isinstance(body[0].value, ast.Yield)
    assert isinstance(body[1], ast.Return)
    assert isinstance(body[1].value, ast.BinOp)
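
The docstring above reasons about a version boundary; here is a minimal plain-Python sketch (not Hy, and not part of the test suite) of the behavior it describes: since Python 3.3, `return <value>` is legal inside a generator and surfaces as `StopIteration.value`.

def gen():
    yield 2
    return 1 + 1  # the trailing expression the compiler turns into ast.Return

g = gen()
assert next(g) == 2
try:
    next(g)
except StopIteration as e:
    assert e.value == 2  # the returned value rides on the exception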
Code Example #3
def test_lex_expression_float():
    """ Make sure expressions can produce floats """
    objs = tokenize("(foo 2.)")
    assert objs == [Expression([Symbol("foo"), Float(2.)])]
    objs = tokenize("(foo -0.5)")
    assert objs == [Expression([Symbol("foo"), Float(-0.5)])]
    objs = tokenize("(foo 1.e7)")
    assert objs == [Expression([Symbol("foo"), Float(1.e7)])]
Code Example #4
def test_symbol_or_keyword():
    for x in ("foo", "foo-bar", "foo_bar", "✈é😂⁂"):
        assert str(Symbol(x)) == x
        assert Keyword(x).name == x
    for x in ("", ":foo", "5"):
        with pytest.raises(ValueError): Symbol(x)
        assert Keyword(x).name == x
    for x in ("foo bar", "fib()"):
        with pytest.raises(ValueError): Symbol(x)
        with pytest.raises(ValueError): Keyword(x)
Code Example #5
def test_sets():
    """ Ensure that we can tokenize a set. """
    objs = tokenize("#{1 2}")
    assert objs == [Set([Integer(1), Integer(2)])]
    objs = tokenize("(bar #{foo bar baz})")
    assert objs == [
        Expression([
            Symbol("bar"),
            Set([Symbol("foo"), Symbol("bar"),
                 Symbol("baz")])
        ])
    ]

    objs = tokenize("#{(foo bar) (baz quux)}")
    assert objs == [
        Set([
            Expression([Symbol("foo"), Symbol("bar")]),
            Expression([Symbol("baz"), Symbol("quux")])
        ])
    ]

    # Duplicate items in a literal set should be okay (and should
    # be preserved).
    objs = tokenize("#{1 2 1 1 2 1}")
    assert objs == [Set([Integer(n) for n in [1, 2, 1, 1, 2, 1]])]
    assert len(objs[0]) == 6

    # https://github.com/hylang/hy/issues/1120
    objs = tokenize("#{a 1}")
    assert objs == [Set([Symbol("a"), Integer(1)])]
Code Example #6
def test_complex():
    """Ensure we tokenize complex numbers properly"""
    # This is a regression test for #143
    entry = tokenize("(1j)")[0][0]
    assert entry == Complex("1.0j")
    entry = tokenize("(1J)")[0][0]
    assert entry == Complex("1.0j")
    entry = tokenize("(j)")[0][0]
    assert entry == Symbol("j")
    entry = tokenize("(J)")[0][0]
    assert entry == Symbol("J")
Code Example #7
File: cmdline.py Project: rajp152k/hy
def ideas_macro(ETname):
    return Expression([
        Symbol('print'),
        String(r"""

    => (import [sh [figlet]])
    => (figlet "Hi, Hy!")
     _   _ _     _   _       _
    | | | (_)   | | | |_   _| |
    | |_| | |   | |_| | | | | |
    |  _  | |_  |  _  | |_| |_|
    |_| |_|_( ) |_| |_|\__, (_)
            |/         |___/


;;; string things
(.join ", " ["what" "the" "heck"])


;;; this one plays with command line bits
(import [sh [cat grep]])
(-> (cat "/usr/share/dict/words") (grep "-E" "bro$"))


;;; filtering a list w/ a lambda
(filter (fn [x] (= (% x 2) 0)) (range 0 10))


;;; swaggin' functional bits (Python rulez)
(max (map (fn [x] (len x)) ["hi" "my" "name" "is" "paul"]))

""")
    ])
Code Example #8
File: compiler.py Project: stjordanis/hy
    def compile_symbol(self, symbol):
        if "." in symbol:
            glob, local = symbol.rsplit(".", 1)

            if not glob:
                raise self._syntax_error(
                    symbol,
                    'cannot access attribute on anything other than a name (in order to get attributes of expressions, use `(. <expression> {attr})` or `(.{attr} <expression>)`)'
                    .format(attr=local))

            if not local:
                raise self._syntax_error(symbol,
                                         'cannot access empty attribute')

            glob = Symbol(glob).replace(symbol)
            ret = self.compile_symbol(glob)

            return asty.Attribute(symbol,
                                  value=ret,
                                  attr=mangle(local),
                                  ctx=ast.Load())

        if mangle(symbol) in ("None", "False", "True"):
            return asty.Constant(symbol,
                                 value=ast.literal_eval(mangle(symbol)))

        return asty.Name(symbol, id=mangle(symbol), ctx=ast.Load())
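
For reference, the nested ast.Attribute shape this recursion assembles for a dotted symbol such as `a.b.c` matches what Python's own parser builds; a small sketch using only the standard library:

import ast

# `a.b.c` parses to Attribute(Attribute(Name('a'), 'b'), 'c'), the same
# right-to-left nesting compile_symbol produces via rsplit(".", 1).
tree = ast.parse("a.b.c", mode="eval").body
assert isinstance(tree, ast.Attribute) and tree.attr == "c"
assert isinstance(tree.value, ast.Attribute) and tree.value.attr == "b"
assert isinstance(tree.value.value, ast.Name) and tree.value.value.id == "a"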
Code Example #9
def symbol_like(obj):
    "Try to interpret `obj` as a number or keyword."

    try:
        return Integer(obj)
    except ValueError:
        pass

    if '/' in obj:
        try:
            lhs, rhs = obj.split('/')
            return Expression(
                [Symbol('hy._Fraction'),
                 Integer(lhs),
                 Integer(rhs)])
        except ValueError:
            pass

    try:
        return Float(obj)
    except ValueError:
        pass

    if obj not in ('j', 'J'):
        try:
            return Complex(obj)
        except ValueError:
            pass

    if obj.startswith(":") and "." not in obj:
        return Keyword(obj[1:])
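
A usage sketch of `symbol_like`, assuming the model classes used above are in scope; note that a plain name falls through every branch and returns None, leaving the caller to build a Symbol:

assert symbol_like("12") == Integer(12)
assert symbol_like("1.5") == Float(1.5)
assert symbol_like("1/2") == Expression(
    [Symbol("hy._Fraction"), Integer(1), Integer(2)])
assert symbol_like(":foo") == Keyword("foo")
assert symbol_like("foo") is None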
Code Example #10
def test_lex_fractions():
    """ Make sure that fractions are valid expressions"""
    objs = tokenize("1/2")
    assert objs == [
        Expression([Symbol("hy._Fraction"),
                    Integer(1), Integer(2)])
    ]
Code Example #11
            def wrapper(hy_compiler, *args):

                if shadow and any(is_unpack("iterable", x) for x in args):
                    # Try a shadow function call with this name instead.
                    return Expression([Symbol("hy.pyops." + name),
                                       *args]).replace(hy_compiler.this)

                expr = hy_compiler.this
                root = unmangle(expr[0])

                if py_version_required and sys.version_info < py_version_required:
                    raise hy_compiler._syntax_error(
                        expr,
                        "`{}` requires Python {} or later".format(
                            root, ".".join(map(str, py_version_required))),
                    )

                try:
                    parse_tree = pattern.parse(args)
                except NoParseError as e:
                    raise hy_compiler._syntax_error(
                        expr[min(e.state.pos + 1,
                                 len(expr) - 1)],
                        "parse error for pattern macro '{}': {}".format(
                            root, e.msg.replace("<EOF>", "end of form")),
                    )
                return fn(hy_compiler, expr, root, *parse_tree)
Code Example #12
def test_compiler_bare_names():
    """
    Check that the compiler doesn't drop bare names from code branches
    """
    e = make_expression(Symbol("do"), Symbol("a"), Symbol("b"), Symbol("c"))
    ret = compiler.HyASTCompiler(types.ModuleType('test')).compile(e)

    # We expect two statements and a final expr.

    assert len(ret.stmts) == 2
    for stmt, symbol in zip(ret.stmts, "ab"):
        assert isinstance(stmt, ast.Expr)
        assert isinstance(stmt.value, ast.Name)
        assert stmt.value.id == symbol

    assert isinstance(ret.expr, ast.Name)
    assert ret.expr.id == "c"
Code Example #13
def test_macroexpand_source_data():
    # https://github.com/hylang/hy/issues/1944
    ast = Expression([Symbol("#@"), String("a")])
    ast.start_line = 3
    ast.start_column = 5
    bad = macroexpand_1(ast, "hy.core.macros")
    assert bad.start_line == 3
    assert bad.start_column == 5
Code Example #14
def test_lex_expression_complex():
    """ Make sure expressions can produce complex """
    def t(x):
        return tokenize("(foo {})".format(x))

    def f(x):
        return [Expression([Symbol("foo"), x])]

    assert t("2.j") == f(Complex(2.j))
    assert t("-0.5j") == f(Complex(-0.5j))
    assert t("1.e7j") == f(Complex(1e7j))
    assert t("j") == f(Symbol("j"))
    assert t("J") == f(Symbol("J"))
    assert isnan(t("NaNj")[0][1].imag)
    assert t("nanj") == f(Symbol("nanj"))
    assert t("Inf+Infj") == f(Complex(complex(float("inf"), float("inf"))))
    assert t("Inf-Infj") == f(Complex(complex(float("inf"), float("-inf"))))
    assert t("Inf-INFj") == f(Symbol("Inf-INFj"))
Code Example #15
File: test_lex.py Project: rajp152k/hy
def test_dicts():
    """ Ensure that we can tokenize a dict. """
    objs = tokenize("{foo bar bar baz}")
    assert objs == [Dict(["foo", "bar", "bar", "baz"])]

    objs = tokenize("(bar {foo bar bar baz})")
    assert objs == [
        Expression([Symbol("bar"),
                    Dict(["foo", "bar", "bar", "baz"])])
    ]

    objs = tokenize("{(foo bar) (baz quux)}")
    assert objs == [
        Dict([
            Expression([Symbol("foo"), Symbol("bar")]),
            Expression([Symbol("baz"), Symbol("quux")])
        ])
    ]
Code Example #16
def term_hashstars(state, p):
    n_stars = len(p[0].getstr()[1:])
    if n_stars == 1:
        sym = "unpack-iterable"
    elif n_stars == 2:
        sym = "unpack-mapping"
    else:
        raise LexException.from_lexer(
            "Too many stars in `#*` construct (if you want to unpack a symbol "
            "beginning with a star, separate it with whitespace)", state, p[0])
    return Expression([Symbol(sym), p[1]])
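
A sketch of what this action yields end to end, assuming the same `tokenize` exercised in the tests above: one star maps to unpack-iterable, two stars to unpack-mapping.

assert tokenize("(f #* args)") == [
    Expression([Symbol("f"),
                Expression([Symbol("unpack-iterable"), Symbol("args")])])
]
assert tokenize("(f #** kwargs)") == [
    Expression([Symbol("f"),
                Expression([Symbol("unpack-mapping"), Symbol("kwargs")])])
]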
Code Example #17
File: scoping.py Project: allison-casey/hy
    def add(self, target, new_name=None):
        """Add a new let-binding target, mapped to a new, unique name."""
        if isinstance(target, (str, Symbol)):
            if "." in target:
                raise ValueError("binding target may not contain a dot")
            name = mangle(target)
            if new_name is None:
                new_name = self.compiler.get_anon_var(f"_hy_let_{name}")
            self.bindings[name] = new_name
            if isinstance(target, Symbol):
                return Symbol(new_name).replace(target)
            return new_name
        if new_name is not None:
            raise ValueError("cannot specify name for compound targets")
        if isinstance(target, List):
            return List(map(self.add, target)).replace(target)
        if (
            isinstance(target, Expression)
            and target
            and target[0] in (Symbol(","), Symbol("unpack-iterable"))
        ):
            return Expression([target[0], *map(self.add, target[1:])]).replace(target)
        raise ValueError(f"invalid binding target: {type(target)}")
Code Example #18
def test_preprocessor_expression():
    """Test that macro expansion doesn't recurse"""
    obj = macroexpand(
        tokenize('(test (test "one" "two"))')[0], __name__,
        HyASTCompiler(__name__))

    assert type(obj) == List
    assert type(obj[0]) == Expression

    assert obj[0] == Expression([Symbol("test"), String("one"), String("two")])

    obj = tokenize('(shill ["one" "two"])')[0][1]
    assert obj == macroexpand(obj, __name__, HyASTCompiler(__name__))
Code Example #19
File: cmdline.py Project: etanol/hy
def koan_macro(ETname):
    return Expression([Symbol('print'),
                       String("""
  Ummon asked the head monk, "What sutra are you lecturing on?"
  "The Nirvana Sutra."
  "The Nirvana Sutra has the Four Virtues, hasn't it?"
  "It has."
  Ummon asked, picking up a cup, "How many virtues has this?"
  "None at all," said the monk.
  "But ancient people said it had, didn't they?" said Ummon.
  "What do you think of what they said?"
  Ummon struck the cup and asked, "You understand?"
  "No," said the monk.
  "Then," said Ummon, "You'd better go on with your lectures on the sutra."
""")])
Code Example #20
def hy_parse(source, filename="<string>"):
    """Parse a Hy source string.

    Args:
      source (str): Source code to parse.
      filename (str): File name corresponding to source.  Defaults to "<string>".

    Returns:
      Expression: the parsed models wrapped in an hy.models.Expression
    """
    _source = re.sub(r"\A#!.*", "", source)
    res = Expression([Symbol("do")] + tokenize(_source + "\n", filename=filename))
    res.source = source
    res.filename = filename
    return res
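
A usage sketch (assuming `hy` is importable): the whole module comes back as one `(do ...)` form, so the first element is always the `do` symbol.

tree = hy_parse("(print 1)")
assert tree[0] == Symbol("do")
assert tree[1] == Expression([Symbol("print"), Integer(1)])
assert tree.filename == "<string>"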
Code Example #21
def test_lex_nan_and_inf():

    assert isnan(tokenize("NaN")[0])
    assert tokenize("Nan") == [Symbol("Nan")]
    assert tokenize("nan") == [Symbol("nan")]
    assert tokenize("NAN") == [Symbol("NAN")]

    assert tokenize("Inf") == [Float(float("inf"))]
    assert tokenize("inf") == [Symbol("inf")]
    assert tokenize("INF") == [Symbol("INF")]

    assert tokenize("-Inf") == [Float(float("-inf"))]
    assert tokenize("-inf") == [Symbol("-inf")]
    assert tokenize("-INF") == [Symbol("-INF")]
Code Example #22
def t_identifier(state, p):
    obj = p[0].value

    val = symbol_like(obj)
    if val is not None:
        return val

    if "." in obj and symbol_like(obj.split(".", 1)[0]) is not None:
        # E.g., `5.attr` or `:foo.attr`
        raise LexException.from_lexer(
            'Cannot access attribute on anything other than a name (in '
            'order to get attributes of expressions, use '
            '`(. <expression> <attr>)` or `(.<attr> <expression>)`)', state,
            p[0])

    return Symbol(obj)
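
A sketch of the error branch, assuming `pytest` and the lexer's LexException are in scope: a dotted token whose head is number- or keyword-like is rejected outright.

with pytest.raises(LexException):
    tokenize("(foo 5.attr)")
with pytest.raises(LexException):
    tokenize("(foo :kw.attr)")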
Code Example #23
def hy_parse(source, filename='<string>'):
    """Parse a Hy source string.

    Args:
      source (string): Source code to parse.
      filename (string, optional): File name corresponding to source.  Defaults to "<string>".

    Returns:
      out : hy.models.Expression
    """
    _source = re.sub(r'\A#!.*', '', source)
    res = Expression([Symbol("do")] +
                       tokenize(_source + "\n",
                                filename=filename))
    res.source = source
    res.filename = filename
    return res
Code Example #24
def test_lex_expression_integer():
    """ Make sure expressions can produce integers """
    objs = tokenize("(foo 2)")
    assert objs == [Expression([Symbol("foo"), Integer(2)])]
Code Example #25
def test_lex_symbols():
    """ Make sure that symbols are valid expressions"""
    objs = tokenize("foo ")
    assert objs == [Symbol("foo")]
Code Example #26
def test_lex_expression_strings():
    """ Test that expressions can produce strings """
    objs = tokenize("(foo \"bar\")")
    assert objs == [Expression([Symbol("foo"), String("bar")])]
Code Example #27
def test_lex_expression_symbols():
    """ Make sure that expressions produce symbols """
    objs = tokenize("(foo bar)")
    assert objs == [Expression([Symbol("foo"), Symbol("bar")])]
Code Example #28
def test_discard():
    """Check that discarded terms are removed properly."""
    # empty
    assert tokenize("") == []
    # single
    assert tokenize("#_1") == []
    # multiple
    assert tokenize("#_1 #_2") == []
    assert tokenize("#_1 #_2 #_3") == []
    # nested discard
    assert tokenize("#_ #_1 2") == []
    assert tokenize("#_ #_ #_1 2 3") == []
    # trailing
    assert tokenize("0") == [Integer(0)]
    assert tokenize("0 #_1") == [Integer(0)]
    assert tokenize("0 #_1 #_2") == [Integer(0)]
    # leading
    assert tokenize("2") == [Integer(2)]
    assert tokenize("#_1 2") == [Integer(2)]
    assert tokenize("#_0 #_1 2") == [Integer(2)]
    assert tokenize("#_ #_0 1 2") == [Integer(2)]
    # both
    assert tokenize("#_1 2 #_3") == [Integer(2)]
    assert tokenize("#_0 #_1 2 #_ #_3 4") == [Integer(2)]
    # inside
    assert tokenize("0 #_1 2") == [Integer(0), Integer(2)]
    assert tokenize("0 #_1 #_2 3") == [Integer(0), Integer(3)]
    assert tokenize("0 #_ #_1 2 3") == [Integer(0), Integer(3)]
    # in List
    assert tokenize("[]") == [List([])]
    assert tokenize("[#_1]") == [List([])]
    assert tokenize("[#_1 #_2]") == [List([])]
    assert tokenize("[#_ #_1 2]") == [List([])]
    assert tokenize("[0]") == [List([Integer(0)])]
    assert tokenize("[0 #_1]") == [List([Integer(0)])]
    assert tokenize("[0 #_1 #_2]") == [List([Integer(0)])]
    assert tokenize("[2]") == [List([Integer(2)])]
    assert tokenize("[#_1 2]") == [List([Integer(2)])]
    assert tokenize("[#_0 #_1 2]") == [List([Integer(2)])]
    assert tokenize("[#_ #_0 1 2]") == [List([Integer(2)])]
    # in Set
    assert tokenize("#{}") == [Set()]
    assert tokenize("#{#_1}") == [Set()]
    assert tokenize("#{0 #_1}") == [Set([Integer(0)])]
    assert tokenize("#{#_1 0}") == [Set([Integer(0)])]
    # in Dict
    assert tokenize("{}") == [Dict()]
    assert tokenize("{#_1}") == [Dict()]
    assert tokenize("{#_0 1 2}") == [Dict([Integer(1), Integer(2)])]
    assert tokenize("{1 #_0 2}") == [Dict([Integer(1), Integer(2)])]
    assert tokenize("{1 2 #_0}") == [Dict([Integer(1), Integer(2)])]
    # in Expression
    assert tokenize("()") == [Expression()]
    assert tokenize("(#_foo)") == [Expression()]
    assert tokenize("(#_foo bar)") == [Expression([Symbol("bar")])]
    assert tokenize("(foo #_bar)") == [Expression([Symbol("foo")])]
    assert tokenize("(foo :bar 1)") == [
        Expression([Symbol("foo"), Keyword("bar"),
                    Integer(1)])
    ]
    assert tokenize("(foo #_:bar 1)") == [
        Expression([Symbol("foo"), Integer(1)])
    ]
    assert tokenize("(foo :bar #_1)") == [
        Expression([Symbol("foo"), Keyword("bar")])
    ]
    # discard term with nesting
    assert tokenize("[1 2 #_[a b c [d e [f g] h]] 3 4]") == [
        List([Integer(1), Integer(2),
              Integer(3), Integer(4)])
    ]
    # discard with other prefix syntax
    assert tokenize("a #_'b c") == [Symbol("a"), Symbol("c")]
    assert tokenize("a '#_b c") == [
        Symbol("a"), Expression([Symbol("quote"), Symbol("c")])
    ]
    assert tokenize("a '#_b #_c d") == [
        Symbol("a"), Expression([Symbol("quote"), Symbol("d")])
    ]
    assert tokenize("a '#_ #_b c d") == [
        Symbol("a"), Expression([Symbol("quote"), Symbol("d")])
    ]
Code Example #29
def test_lex_comment_382():
    """Ensure that we can tokenize sources with a comment at the end"""
    entry = tokenize("foo ;bar\n;baz")
    assert entry == [Symbol("foo")]
Code Example #30
def test_tag_macro():
    """Ensure tag macros are handled properly"""
    entry = tokenize("#^()")
    assert entry[0][0] == Symbol("#^")
    assert len(entry[0]) == 2