Example #1
def test_dicts():
    """ Ensure that we can tokenize a dict. """
    objs = tokenize("{foo bar bar baz}")
    assert objs == [
        Dict([Symbol("foo"),
              Symbol("bar"),
              Symbol("bar"),
              Symbol("baz")])
    ]

    objs = tokenize("(bar {foo bar bar baz})")
    assert objs == [
        Expression([
            Symbol("bar"),
            Dict([Symbol("foo"),
                  Symbol("bar"),
                  Symbol("bar"),
                  Symbol("baz")])
        ])
    ]

    objs = tokenize("{(foo bar) (baz quux)}")
    assert objs == [
        Dict([
            Expression([Symbol("foo"), Symbol("bar")]),
            Expression([Symbol("baz"), Symbol("quux")])
        ])
    ]
Example #2
def test_sets():
    """ Ensure that we can tokenize a set. """
    objs = tokenize("#{1 2}")
    assert objs == [Set([Integer(1), Integer(2)])]
    objs = tokenize("(bar #{foo bar baz})")
    assert objs == [
        Expression([
            Symbol("bar"),
            Set([Symbol("foo"), Symbol("bar"),
                 Symbol("baz")])
        ])
    ]

    objs = tokenize("#{(foo bar) (baz quux)}")
    assert objs == [
        Set([
            Expression([Symbol("foo"), Symbol("bar")]),
            Expression([Symbol("baz"), Symbol("quux")])
        ])
    ]

    # Duplicate items in a literal set should be okay (and should
    # be preserved).
    objs = tokenize("#{1 2 1 1 2 1}")
    assert objs == [Set([Integer(n) for n in [1, 2, 1, 1, 2, 1]])]
    assert len(objs[0]) == 6

    # https://github.com/hylang/hy/issues/1120
    objs = tokenize("#{a 1}")
    assert objs == [Set([Symbol("a"), Integer(1)])]
Example #3
def test_compiler_yield_return():
    """
    Check that the compiler correctly generates return statements for
    a generator function. In Python versions prior to 3.3, the return
    statement in a generator can't take a value, so the final expression
    should not generate a return statement. From 3.3 onwards a return
    value should be generated.
    """
    e = make_expression(Symbol("fn"), List(),
                        Expression([Symbol("yield"),
                                    Integer(2)]),
                        Expression([Symbol("+"),
                                    Integer(1),
                                    Integer(1)]))
    ret = compiler.HyASTCompiler(types.ModuleType('test')).compile_atom(e)

    assert len(ret.stmts) == 1
    stmt, = ret.stmts
    assert isinstance(stmt, ast.FunctionDef)
    body = stmt.body
    assert len(body) == 2
    assert isinstance(body[0], ast.Expr)
    assert isinstance(body[0].value, ast.Yield)
    assert isinstance(body[1], ast.Return)
    assert isinstance(body[1].value, ast.BinOp)
Example #4
def test_macroexpand_source_data():
    # https://github.com/hylang/hy/issues/1944
    ast = Expression([Symbol("#@"), String("a")])
    ast.start_line = 3
    ast.start_column = 5
    bad = macroexpand_1(ast, "hy.core.macros")
    assert bad.start_line == 3
    assert bad.start_column == 5
Example #5
def test_lex_expression_float():
    """ Make sure expressions can produce floats """
    objs = tokenize("(foo 2.)")
    assert objs == [Expression([Symbol("foo"), Float(2.)])]
    objs = tokenize("(foo -0.5)")
    assert objs == [Expression([Symbol("foo"), Float(-0.5)])]
    objs = tokenize("(foo 1.e7)")
    assert objs == [Expression([Symbol("foo"), Float(1.e7)])]
Example #6
def hy_parse(source, filename="<string>"):
    """Parse a Hy source string.

    Args:
      source (str): Source code to parse.
      filename (str): File name corresponding to source.  Defaults to "<string>".

    Returns:
      Expression: the parsed models, wrapped in a hy.models.Expression
    """
    _source = re.sub(r"\A#!.*", "", source)
    res = Expression([Symbol("do")] + tokenize(_source + "\n", filename=filename))
    res.source = source
    res.filename = filename
    return res
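For context, a minimal usage sketch of the function above (hedged: it assumes the definition above is in scope and that hy.models is importable; the (do ...) wrapper is exactly what the code constructs):

from hy.models import Expression, Symbol, Integer

tree = hy_parse("(print 1)")
# Every top-level form is wrapped in a single `do` expression,
# with the original source and filename attached.
assert tree == Expression([
    Symbol("do"),
    Expression([Symbol("print"), Integer(1)]),
])
assert tree.filename == "<string>"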
Example #7
def ideas_macro(ETname):
    return Expression([
        Symbol('print'),
        String(r"""

    => (import [sh [figlet]])
    => (figlet "Hi, Hy!")
     _   _ _     _   _       _
    | | | (_)   | | | |_   _| |
    | |_| | |   | |_| | | | | |
    |  _  | |_  |  _  | |_| |_|
    |_| |_|_( ) |_| |_|\__, (_)
            |/         |___/


;;; string things
(.join ", " ["what" "the" "heck"])


;;; this one plays with command line bits
(import [sh [cat grep]])
(-> (cat "/usr/share/dict/words") (grep "-E" "bro$"))


;;; filtering a list w/ a lambda
(filter (fn [x] (= (% x 2) 0)) (range 0 10))


;;; swaggin' functional bits (Python rulez)
(max (map (fn [x] (len x)) ["hi" "my" "name" "is" "paul"]))

""")
    ])
Example #8
def symbol_like(obj):
    "Try to interpret `obj` as a number or keyword."

    try:
        return Integer(obj)
    except ValueError:
        pass

    if '/' in obj:
        try:
            lhs, rhs = obj.split('/')
            return Expression(
                [Symbol('hy._Fraction'),
                 Integer(lhs),
                 Integer(rhs)])
        except ValueError:
            pass

    try:
        return Float(obj)
    except ValueError:
        pass

    if obj not in ('j', 'J'):
        try:
            return Complex(obj)
        except ValueError:
            pass

    if obj.startswith(":") and "." not in obj:
        return Keyword(obj[1:])
Example #9
def symbol_like(obj):
    "Try to interpret `obj` as a number or keyword."

    try:
        return Integer(obj)
    except ValueError:
        pass

    if "/" in obj:
        try:
            lhs, rhs = obj.split("/")
            return Expression(
                [sym("hy._Fraction"),
                 Integer(lhs), Integer(rhs)])
        except ValueError:
            pass

    try:
        return Float(obj)
    except ValueError:
        pass

    if obj not in ("j", "J"):
        try:
            return Complex(obj)
        except ValueError:
            pass

    if obj.startswith(":") and "." not in obj:
        return Keyword(obj[1:], from_parser=True)
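A quick sketch of the dispatch order the code above implies (hypothetical inputs; when nothing matches, the function falls off the end and returns None, and the caller presumably falls back to a plain Symbol):

# Assuming the definition above is in scope:
symbol_like("42")    # -> Integer(42)
symbol_like("1/2")   # -> Expression([sym("hy._Fraction"), Integer(1), Integer(2)])
symbol_like("2.5")   # -> Float(2.5)
symbol_like("3j")    # -> Complex(3j)
symbol_like(":foo")  # -> Keyword("foo", from_parser=True)
symbol_like("spam")  # -> None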
Example #10
def test_lex_fractions():
    """ Make sure that fractions are valid expressions"""
    objs = tokenize("1/2")
    assert objs == [
        Expression([Symbol("hy._Fraction"),
                    Integer(1), Integer(2)])
    ]
Example #11
            def wrapper(hy_compiler, *args):

                if shadow and any(is_unpack("iterable", x) for x in args):
                    # Try a shadow function call with this name instead.
                    return Expression([Symbol("hy.pyops." + name),
                                       *args]).replace(hy_compiler.this)

                expr = hy_compiler.this
                root = unmangle(expr[0])

                if py_version_required and sys.version_info < py_version_required:
                    raise hy_compiler._syntax_error(
                        expr,
                        "`{}` requires Python {} or later".format(
                            root, ".".join(map(str, py_version_required))),
                    )

                try:
                    parse_tree = pattern.parse(args)
                except NoParseError as e:
                    raise hy_compiler._syntax_error(
                        expr[min(e.state.pos + 1,
                                 len(expr) - 1)],
                        "parse error for pattern macro '{}': {}".format(
                            root, e.msg.replace("<EOF>", "end of form")),
                    )
                return fn(hy_compiler, expr, root, *parse_tree)
Example #12
def test_lex_digit_separators():

    assert tokenize("1_000_000") == [Integer(1000000)]
    assert tokenize("1,000,000") == [Integer(1000000)]
    assert tokenize("1,000_000") == [Integer(1000000)]
    assert tokenize("1_000,000") == [Integer(1000000)]

    assert tokenize("0x_af") == [Integer(0xaf)]
    assert tokenize("0x,af") == [Integer(0xaf)]
    assert tokenize("0b_010") == [Integer(0b010)]
    assert tokenize("0b,010") == [Integer(0b010)]
    assert tokenize("0o_373") == [Integer(0o373)]
    assert tokenize("0o,373") == [Integer(0o373)]

    assert tokenize('1_2.3,4') == [Float(12.34)]
    assert tokenize('1_2e3,4') == [Float(12e34)]
    assert (tokenize("1,2/3_4") == [
        Expression([Symbol("hy._Fraction"),
                    Integer(12),
                    Integer(34)])
    ])
    assert tokenize("1,0_00j") == [Complex(1000j)]

    assert tokenize("1,,,,___,____,,__,,2__,,,__") == [Integer(12)]
    assert (tokenize("_1,,,,___,____,,__,,2__,,,__") == [
        Symbol("_1,,,,___,____,,__,,2__,,,__")
    ])
    assert (tokenize("1,,,,___,____,,__,,2__,q,__") == [
        Symbol("1,,,,___,____,,__,,2__,q,__")
    ])
Example #13
def hy_parse(source, filename='<string>'):
    """Parse a Hy source string.

    Args:
      source (string): Source code to parse.
      filename (string, optional): File name corresponding to source.  Defaults to "<string>".

    Returns:
      out : hy.models.Expression
    """
    _source = re.sub(r'\A#!.*', '', source)
    res = Expression([Symbol("do")] +
                       tokenize(_source + "\n",
                                filename=filename))
    res.source = source
    res.filename = filename
    return res
Example #14
def make_expression(*args):
    h = Expression(args)
    h.start_line = 1
    h.end_line = 1
    h.start_column = 1
    h.end_column = 1
    return h.replace(h)
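The final h.replace(h) call is what makes this helper useful: in Hy's model API, replace copies position attributes onto any child models that lack them, so the children of the returned expression carry line and column information too. A hedged check of that behavior (assuming hy.models semantics as described):

e = make_expression(Symbol("fn"), List())
assert e.start_line == 1
# replace() pushed the position info down to the children:
assert e[0].start_line == 1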
Example #15
def term_hashstars(state, p):
    n_stars = len(p[0].getstr()[1:])
    if n_stars == 1:
        sym = "unpack-iterable"
    elif n_stars == 2:
        sym = "unpack-mapping"
    else:
        raise LexException.from_lexer(
            "Too many stars in `#*` construct (if you want to unpack a symbol "
            "beginning with a star, separate it with whitespace)", state, p[0])
    return Expression([Symbol(sym), p[1]])
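So `#*` and `#**` read as explicit unpacking forms. A hedged sketch of the resulting models, using the same tokenize as the test examples above:

objs = tokenize("(f #* args #** kwargs)")
assert objs == [
    Expression([
        Symbol("f"),
        Expression([Symbol("unpack-iterable"), Symbol("args")]),
        Expression([Symbol("unpack-mapping"), Symbol("kwargs")]),
    ])
]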
Example #16
def test_preprocessor_expression():
    """Test that macro expansion doesn't recurse"""
    obj = macroexpand(
        tokenize('(test (test "one" "two"))')[0], __name__,
        HyASTCompiler(__name__))

    assert type(obj) == List
    assert type(obj[0]) == Expression

    assert obj[0] == Expression([Symbol("test"), String("one"), String("two")])

    obj = List([String("one"), String("two")])
    obj = tokenize('(shill ["one" "two"])')[0][1]
    assert obj == macroexpand(obj, __name__, HyASTCompiler(__name__))
Example #17
File: cmdline.py Project: etanol/hy
def koan_macro(ETname):
    return Expression([Symbol('print'),
                       String("""
  Ummon asked the head monk, "What sutra are you lecturing on?"
  "The Nirvana Sutra."
  "The Nirvana Sutra has the Four Virtues, hasn't it?"
  "It has."
  Ummon asked, picking up a cup, "How many virtues has this?"
  "None at all," said the monk.
  "But ancient people said it had, didn't they?" said Ummon.
  "What do you think of what they said?"
  Ummon struck the cup and asked, "You understand?"
  "No," said the monk.
  "Then," said Ummon, "You'd better go on with your lectures on the sutra."
""")])
Example #18
def add(self, target, new_name=None):
    """Add a new let-binding target, mapped to a new, unique name."""
    if isinstance(target, (str, Symbol)):
        if "." in target:
            raise ValueError("binding target may not contain a dot")
        name = mangle(target)
        if new_name is None:
            new_name = self.compiler.get_anon_var(f"_hy_let_{name}")
        self.bindings[name] = new_name
        if isinstance(target, Symbol):
            return Symbol(new_name).replace(target)
        return new_name
    if new_name is not None:
        raise ValueError("cannot specify name for compound targets")
    if isinstance(target, List):
        return List(map(self.add, target)).replace(target)
    if (
        isinstance(target, Expression)
        and target
        and target[0] in (Symbol(","), Symbol("unpack-iterable"))
    ):
        return Expression([target[0], *map(self.add, target[1:])]).replace(target)
    raise ValueError(f"invalid binding target: {type(target)}")
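For orientation, the kinds of binding targets the method accepts, written out as Hy models (a hypothetical illustration, not taken from the Hy codebase):

from hy.models import Expression, List, Symbol

targets = [
    Symbol("x"),                                          # simple name, mapped to a fresh one
    List([Symbol("a"), Symbol("b")]),                     # destructuring list, handled recursively
    Expression([Symbol(","), Symbol("a"), Symbol("b")]),  # tuple target
    Expression([Symbol("unpack-iterable"), Symbol("r")]), # *r target
]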
Example #19
def test_wrap_nested_expr():
    """ Test conversion of Expressions with embedded non-HyObjects."""
    wrapped = as_model(Expression([0]))
    assert type(wrapped) == Expression
    assert type(wrapped[0]) == Integer
    assert wrapped == Expression([Integer(0)])
Example #20
def f(x):
    return [Expression([Symbol("foo"), x])]
Example #21
def term_annotation(state, p):
    return Expression([Symbol("annotate*"), p[1]])
Example #22
def hash_other(state, p):
    # p == [(Token('HASHOTHER', '#foo'), bar)]
    return Expression([Symbol(p[0].getstr()), p[1]])
Example #23
def test_lex_expression_strings():
    """ Test that expressions can produce strings """
    objs = tokenize("(foo \"bar\")")
    assert objs == [Expression([Symbol("foo"), String("bar")])]
Example #24
def empty_paren(state, p):
    return Expression([])
Example #25
def term_unquote_splice(state, p):
    return Expression([Symbol("unquote-splice"), p[1]])
Example #26
def test_discard():
    """Check that discarded terms are removed properly."""
    # empty
    assert tokenize("") == []
    # single
    assert tokenize("#_1") == []
    # multiple
    assert tokenize("#_1 #_2") == []
    assert tokenize("#_1 #_2 #_3") == []
    # nested discard
    assert tokenize("#_ #_1 2") == []
    assert tokenize("#_ #_ #_1 2 3") == []
    # trailing
    assert tokenize("0") == [Integer(0)]
    assert tokenize("0 #_1") == [Integer(0)]
    assert tokenize("0 #_1 #_2") == [Integer(0)]
    # leading
    assert tokenize("2") == [Integer(2)]
    assert tokenize("#_1 2") == [Integer(2)]
    assert tokenize("#_0 #_1 2") == [Integer(2)]
    assert tokenize("#_ #_0 1 2") == [Integer(2)]
    # both
    assert tokenize("#_1 2 #_3") == [Integer(2)]
    assert tokenize("#_0 #_1 2 #_ #_3 4") == [Integer(2)]
    # inside
    assert tokenize("0 #_1 2") == [Integer(0), Integer(2)]
    assert tokenize("0 #_1 #_2 3") == [Integer(0), Integer(3)]
    assert tokenize("0 #_ #_1 2 3") == [Integer(0), Integer(3)]
    # in List
    assert tokenize("[]") == [List([])]
    assert tokenize("[#_1]") == [List([])]
    assert tokenize("[#_1 #_2]") == [List([])]
    assert tokenize("[#_ #_1 2]") == [List([])]
    assert tokenize("[0]") == [List([Integer(0)])]
    assert tokenize("[0 #_1]") == [List([Integer(0)])]
    assert tokenize("[0 #_1 #_2]") == [List([Integer(0)])]
    assert tokenize("[2]") == [List([Integer(2)])]
    assert tokenize("[#_1 2]") == [List([Integer(2)])]
    assert tokenize("[#_0 #_1 2]") == [List([Integer(2)])]
    assert tokenize("[#_ #_0 1 2]") == [List([Integer(2)])]
    # in Set
    assert tokenize("#{}") == [Set()]
    assert tokenize("#{#_1}") == [Set()]
    assert tokenize("#{0 #_1}") == [Set([Integer(0)])]
    assert tokenize("#{#_1 0}") == [Set([Integer(0)])]
    # in Dict
    assert tokenize("{}") == [Dict()]
    assert tokenize("{#_1}") == [Dict()]
    assert tokenize("{#_0 1 2}") == [Dict([Integer(1), Integer(2)])]
    assert tokenize("{1 #_0 2}") == [Dict([Integer(1), Integer(2)])]
    assert tokenize("{1 2 #_0}") == [Dict([Integer(1), Integer(2)])]
    # in Expression
    assert tokenize("()") == [Expression()]
    assert tokenize("(#_foo)") == [Expression()]
    assert tokenize("(#_foo bar)") == [Expression([Symbol("bar")])]
    assert tokenize("(foo #_bar)") == [Expression([Symbol("foo")])]
    assert tokenize("(foo :bar 1)") == [
        Expression([Symbol("foo"), Keyword("bar"),
                    Integer(1)])
    ]
    assert tokenize("(foo #_:bar 1)") == [
        Expression([Symbol("foo"), Integer(1)])
    ]
    assert tokenize("(foo :bar #_1)") == [
        Expression([Symbol("foo"), Keyword("bar")])
    ]
    # discard term with nesting
    assert tokenize("[1 2 #_[a b c [d e [f g] h]] 3 4]") == [
        List([Integer(1), Integer(2),
              Integer(3), Integer(4)])
    ]
    # discard with other prefix syntax
    assert tokenize("a #_'b c") == [Symbol("a"), Symbol("c")]
    assert tokenize("a '#_b c") == [
        Symbol("a"), Expression([Symbol("quote"), Symbol("c")])
    ]
    assert tokenize("a '#_b #_c d") == [
        Symbol("a"), Expression([Symbol("quote"), Symbol("d")])
    ]
    assert tokenize("a '#_ #_b c d") == [
        Symbol("a"), Expression([Symbol("quote"), Symbol("d")])
    ]
Example #27
def test_lex_expression_symbols():
    """ Make sure that expressions produce symbols """
    objs = tokenize("(foo bar)")
    assert objs == [Expression([Symbol("foo"), Symbol("bar")])]
Example #28
def term_quasiquote(state, p):
    return Expression([Symbol("quasiquote"), p[1]])
Example #29
def test_lex_expression_integer():
    """ Make sure expressions can produce integers """
    objs = tokenize("(foo 2)")
    assert objs == [Expression([Symbol("foo"), Integer(2)])]
Example #30
def paren(state, p):
    return Expression(p[1])