Example #1
def term_hashstars(state, p):
    n_stars = len(p[0].getstr()[1:])
    if n_stars == 1:
        sym = "unpack-iterable"
    elif n_stars == 2:
        sym = "unpack-mapping"
    else:
        raise LexException.from_lexer(
            "Too many stars in `#*` construct (if you want to unpack a symbol "
            "beginning with a star, separate it with whitespace)",
            state, p[0])
    return HyExpression([HySymbol(sym), p[1]])
Example #2
def test_sets():
    """ Ensure that we can tokenize a set. """
    objs = tokenize("#{1 2}")
    assert objs == [HySet([HyInteger(1), HyInteger(2)])]
    objs = tokenize("(bar #{foo bar baz})")
    assert objs == [
        HyExpression([HySymbol("bar"),
                      HySet(["foo", "bar", "baz"])])
    ]

    objs = tokenize("#{(foo bar) (baz quux)}")
    assert objs == [
        HySet([
            HyExpression([HySymbol("foo"), HySymbol("bar")]),
            HyExpression([HySymbol("baz"), HySymbol("quux")])
        ])
    ]

    # Duplicate items in a literal set should be okay (and should
    # be preserved).
    objs = tokenize("#{1 2 1 1 2 1}")
    assert objs == [HySet([HyInteger(n) for n in [1, 2, 1, 1, 2, 1]])]
    assert len(objs[0]) == 6

    # https://github.com/hylang/hy/issues/1120
    objs = tokenize("#{a 1}")
    assert objs == [HySet([HySymbol("a"), HyInteger(1)])]
Example #3
def hy_parse(source):
    """Parse a Hy source string.

    Parameters
    ----------
    source: string
        Source code to parse.

    Returns
    -------
    out : HyExpression
        The parsed forms, wrapped in a single `do` expression.
    """
    source = re.sub(r'\A#!.*', '', source)
    return HyExpression([HySymbol("do")] + tokenize(source + "\n"))
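A minimal usage sketch for this entry point (an illustration, not part of the project; it assumes tokenize, HyExpression and HySymbol are the same names used in the snippet above):

source = "#!/usr/bin/env hy\n(print 1)"
tree = hy_parse(source)
# The shebang line is stripped by the re.sub above, and every top-level
# form is wrapped in a single `do` expression, so the head is `do`.
assert tree[0] == HySymbol("do")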
Example #4
def koan_macro(ETname):
    return HyExpression([HySymbol('print'),
                         HyString("""
  Ummon asked the head monk, "What sutra are you lecturing on?"
  "The Nirvana Sutra."
  "The Nirvana Sutra has the Four Virtues, hasn't it?"
  "It has."
  Ummon asked, picking up a cup, "How many virtues has this?"
  "None at all," said the monk.
  "But ancient people said it had, didn't they?" said Ummon.
  "What do you think of what they said?"
  Ummon struck the cup and asked, "You understand?"
  "No," said the monk.
  "Then," said Ummon, "You'd better go on with your lectures on the sutra."
""")])
Example #5
def test_compiler_yield_return():
    """
    Check that the compiler correctly generates return statements for
    a generator function. In Python versions prior to 3.3, the return
    statement in a generator can't take a value, so the final expression
    should not generate a return statement. From 3.3 onwards a return
    value should be generated.
    """
    e = make_expression(
        HySymbol("fn"), HyList(),
        HyExpression([HySymbol("yield"), HyInteger(2)]),
        HyExpression([HySymbol("+"), HyInteger(1),
                      HyInteger(1)]))
    ret = compiler.HyASTCompiler(types.ModuleType('test')).compile_atom(e)

    assert len(ret.stmts) == 1
    stmt, = ret.stmts
    assert isinstance(stmt, ast.FunctionDef)
    body = stmt.body
    assert len(body) == 2
    assert isinstance(body[0], ast.Expr)
    assert isinstance(body[0].value, ast.Yield)
    assert isinstance(body[1], ast.Return)
    assert isinstance(body[1].value, ast.BinOp)
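For orientation, the hand-built expression above corresponds to the Hy source sketched below; this equivalence is an interpretation of the test, not something the test itself asserts:

equivalent = tokenize("(fn [] (yield 2) (+ 1 1))")[0]
# Reading this source gives the same (fn [] (yield 2) (+ 1 1)) form that
# the test constructs with make_expression; the compiler emits a generator
# whose final expression becomes `return 1 + 1` on Python 3.3+.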
Example #6
def test_lex_digit_separators():

    assert tokenize("1_000_000") == [HyInteger(1000000)]
    assert tokenize("1,000,000") == [HyInteger(1000000)]
    assert tokenize("1,000_000") == [HyInteger(1000000)]
    assert tokenize("1_000,000") == [HyInteger(1000000)]

    assert tokenize("0x_af") == [HyInteger(0xaf)]
    assert tokenize("0x,af") == [HyInteger(0xaf)]
    assert tokenize("0b_010") == [HyInteger(0b010)]
    assert tokenize("0b,010") == [HyInteger(0b010)]
    assert tokenize("0o_373") == [HyInteger(0o373)]
    assert tokenize("0o,373") == [HyInteger(0o373)]

    assert tokenize('1_2.3,4') == [HyFloat(12.34)]
    assert tokenize('1_2e3,4') == [HyFloat(12e34)]
    assert (tokenize("1,2/3_4") ==
            [HyExpression([HySymbol("fraction"),
             HyInteger(12), HyInteger(34)])])
    assert tokenize("1,0_00j") == [HyComplex(1000j)]

    assert tokenize(",,,,___,__1__,,__,,2__,,,__") == [HyInteger(12)]
    assert (tokenize(",,,,___,__1__,,__,,2__,q,__") ==
            [HySymbol(",,,,___,__1__,,__,,2__,q,__")])
Example #7
def test_preprocessor_expression():
    """ Test that macro expansion doesn't recurse"""
    obj = macroexpand(
        tokenize('(test (test "one" "two"))')[0], HyASTCompiler(__name__))

    assert type(obj) == HyList
    assert type(obj[0]) == HyExpression

    assert obj[0] == HyExpression(
        [HySymbol("test"), HyString("one"),
         HyString("two")])

    obj = HyList([HyString("one"), HyString("two")])
    obj = tokenize('(shill ["one" "two"])')[0][1]
    assert obj == macroexpand(obj, HyASTCompiler(""))
Example #8
File: cmdline.py Project: munk/hy
    def runsource(self, source, filename='<input>', symbol='single'):
        global SIMPLE_TRACEBACKS
        try:
            try:
                tokens = tokenize(source)
            except PrematureEndOfInput:
                return True
            do = HyExpression([HySymbol('do')] + tokens)
            do.start_line = do.end_line = do.start_column = do.end_column = 1
            do.replace(do)
        except LexException as e:
            if e.source is None:
                e.source = source
                e.filename = filename
            print(e, file=sys.stderr)
            return False

        try:

            def ast_callback(main_ast, expr_ast):
                if self.spy:
                    # Mush the two AST chunks into a single module for
                    # conversion into Python.
                    new_ast = ast.Module(main_ast.body +
                                         [ast.Expr(expr_ast.body)])
                    print(astor.to_source(new_ast))

            value = hy_eval(do, self.locals, "__console__", ast_callback)
        except HyTypeError as e:
            if e.source is None:
                e.source = source
                e.filename = filename
            if SIMPLE_TRACEBACKS:
                print(e, file=sys.stderr)
            else:
                self.showtraceback()
            return False
        except Exception:
            self.showtraceback()
            return False

        if value is not None:
            # Make the last non-None value available to
            # the user as `_`.
            self.locals['_'] = value
            # Print the value.
            print(self.output_fn(value))
        return False
Example #9
def hy_parse(source, filename='<string>'):
    """Parse a Hy source string.

    Args:
      source (string): Source code to parse.
      filename (string, optional): File name corresponding to source.  Defaults to "<string>".

    Returns:
      out : HyExpression
    """
    _source = re.sub(r'\A#!.*', '', source)
    res = HyExpression([HySymbol("do")] +
                       tokenize(_source + "\n", filename=filename))
    res.source = source
    res.filename = filename
    return res
Example #10
def t_identifier(p):
    obj = p[0].value

    val = symbol_like(obj)
    if val is not None:
        return val

    if "." in obj and symbol_like(obj.split(".", 1)[0]) is not None:
        # E.g., `5.attr` or `:foo.attr`
        raise LexException(
            'Cannot access attribute on anything other than a name (in '
            'order to get attributes of expressions, use '
            '`(. <expression> <attr>)` or `(.<attr> <expression>)`)',
            p[0].source_pos.lineno, p[0].source_pos.colno)

    return HySymbol(obj)
Example #11
def test_lex_nan_and_inf():

    assert isnan(tokenize("NaN")[0])
    assert tokenize("Nan") == [HySymbol("Nan")]
    assert tokenize("nan") == [HySymbol("nan")]
    assert tokenize("NAN") == [HySymbol("NAN")]

    assert tokenize("Inf") == [HyFloat(float("inf"))]
    assert tokenize("inf") == [HySymbol("inf")]
    assert tokenize("INF") == [HySymbol("INF")]

    assert tokenize("-Inf") == [HyFloat(float("-inf"))]
    assert tokenize("-inf") == [HySymbol("_inf")]
    assert tokenize("-INF") == [HySymbol("_INF")]
Example #12
    def test_fn_compiler_empty_function(self):
        ret = self.c.compile_function_def(
            self._make_expression(HySymbol("fn"), HyList()))
        self.assertEqual(ret.imports, {})

        self.assertEqual(len(ret.stmts), 1)
        stmt = ret.stmts[0]
        self.assertIsInstance(stmt, ast.FunctionDef)
        self.assertIsInstance(stmt.args, ast.arguments)
        self.assertEqual(stmt.args.vararg, None)
        self.assertEqual(stmt.args.kwarg, None)
        self.assertEqual(stmt.args.defaults, [])
        self.assertEqual(stmt.decorator_list, [])
        self.assertEqual(len(stmt.body), 1)
        self.assertIsInstance(stmt.body[0], ast.Pass)

        self.assertIsInstance(ret.expr, ast.Name)
Example #13
def test_lex_mangling_qmark():
    """Ensure that identifiers ending with a question mark get mangled ok"""
    entry = tokenize("foo?")
    assert entry == [HySymbol("is_foo")]
    entry = tokenize("?")
    assert entry == [HySymbol("?")]
    entry = tokenize("im?foo")
    assert entry == [HySymbol("im?foo")]
    entry = tokenize(".foo?")
    assert entry == [HySymbol(".is_foo")]
    entry = tokenize("foo.bar?")
    assert entry == [HySymbol("foo.is_bar")]
    entry = tokenize("foo?.bar")
    assert entry == [HySymbol("is_foo.bar")]
    entry = tokenize(".foo?.bar.baz?")
    assert entry == [HySymbol(".is_foo.bar.is_baz")]
Example #14
def test_lex_mangling_bang():
    """Ensure that identifiers ending with a bang get mangled ok"""
    entry = tokenize("foo!")
    assert entry == [HySymbol("foo_bang")]
    entry = tokenize("!")
    assert entry == [HySymbol("!")]
    entry = tokenize("im!foo")
    assert entry == [HySymbol("im!foo")]
    entry = tokenize(".foo!")
    assert entry == [HySymbol(".foo_bang")]
    entry = tokenize("foo.bar!")
    assert entry == [HySymbol("foo.bar_bang")]
    entry = tokenize("foo!.bar")
    assert entry == [HySymbol("foo_bang.bar")]
    entry = tokenize(".foo!.bar.baz!")
    assert entry == [HySymbol(".foo_bang.bar.baz_bang")]
Example #15
def hash_other(p):
    # p == [(Token('HASHOTHER', '#foo'), bar)]
    st = p[0].getstr()[1:]
    str_object = HyString(st)
    expr = p[1]
    return HyExpression([HySymbol("dispatch-tag-macro"), str_object, expr])
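As a hedged illustration of the shape this rule builds, an input like `#foo bar` becomes a call to the tag-macro dispatcher (spelled dispatch-tag-macro here; other Hy versions use dispatch_tag_macro, as in Example #26):

# Expected result of hash_other for the token '#foo' followed by the form bar:
expected = HyExpression([HySymbol("dispatch-tag-macro"),
                         HyString("foo"),
                         HySymbol("bar")])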
Example #16
def term_unquote_splice(p):
    return HyExpression([HySymbol("unquote-splice"), p[1]])
Example #17
def term_quasiquote(p):
    return HyExpression([HySymbol("quasiquote"), p[1]])
Example #18
def test_lex_symbols():
    """ Make sure that symbols are valid expressions"""
    objs = tokenize("foo ")
    assert objs == [HySymbol("foo")]
Example #19
def test_lex_expression_integer():
    """ Make sure expressions can produce integers """
    objs = tokenize("(foo 2)")
    assert objs == [HyExpression([HySymbol("foo"), HyInteger(2)])]
Example #20
def test_lex_expression_strings():
    """ Test that expressions can produce strings """
    objs = tokenize("(foo \"bar\")")
    assert objs == [HyExpression([HySymbol("foo"), HyString("bar")])]
Example #21
def test_lex_fractions():
    """ Make sure that fractions are valid expressions"""
    objs = tokenize("1/2")
    assert objs == [HyExpression([HySymbol("fraction"), HyInteger(1),
                                  HyInteger(2)])]
Example #22
def test_lex_expression_symbols():
    """ Make sure that expressions produce symbols """
    objs = tokenize("(foo bar)")
    assert objs == [HyExpression([HySymbol("foo"), HySymbol("bar")])]
Example #23
def test_simple_cons():
    """Check that cons gets tokenized correctly"""
    entry = tokenize("(a . b)")[0]
    assert entry == HyCons(HySymbol("a"), HySymbol("b"))
Example #24
def test_lex_mangling_hyphen():
    """Ensure that hyphens get translated to underscores during mangling"""
    entry = tokenize("foo-bar")
    assert entry == [HySymbol("foo_bar")]
    entry = tokenize("-")
    assert entry == [HySymbol("-")]
Example #25
    # `t` is assumed to tokenize "(foo %s)" % x (fragment of a larger test).
    def t(x): return tokenize("(foo %s)" % x)

    def f(x): return [HyExpression([HySymbol("foo"), x])]

    assert t("2.j") == f(HyComplex(2.j))
Example #26
def test_tag_macro():
    """Ensure tag macros are handled properly"""
    entry = tokenize("#^()")
    assert entry[0][0] == HySymbol("dispatch_tag_macro")
    assert entry[0][1] == HyString("^")
    assert len(entry[0]) == 3
Example #27
File: importer.py Project: waigx/hy
def import_buffer_to_hst(buf):
    """Import content from buf and return a Hy AST."""
    return HyExpression([HySymbol("do")] + tokenize(buf + "\n"))
Example #28
def test_discard():
    """Check that discarded terms are removed properly."""
    # empty
    assert tokenize("") == []
    # single
    assert tokenize("#_1") == []
    # multiple
    assert tokenize("#_1 #_2") == []
    assert tokenize("#_1 #_2 #_3") == []
    # nested discard
    assert tokenize("#_ #_1 2") == []
    assert tokenize("#_ #_ #_1 2 3") == []
    # trailing
    assert tokenize("0") == [0]
    assert tokenize("0 #_1") == [0]
    assert tokenize("0 #_1 #_2") == [0]
    # leading
    assert tokenize("2") == [2]
    assert tokenize("#_1 2") == [2]
    assert tokenize("#_0 #_1 2") == [2]
    assert tokenize("#_ #_0 1 2") == [2]
    # both
    assert tokenize("#_1 2 #_3") == [2]
    assert tokenize("#_0 #_1 2 #_ #_3 4") == [2]
    # inside
    assert tokenize("0 #_1 2") == [0, 2]
    assert tokenize("0 #_1 #_2 3") == [0, 3]
    assert tokenize("0 #_ #_1 2 3") == [0, 3]
    # in HyList
    assert tokenize("[]") == [HyList([])]
    assert tokenize("[#_1]") == [HyList([])]
    assert tokenize("[#_1 #_2]") == [HyList([])]
    assert tokenize("[#_ #_1 2]") == [HyList([])]
    assert tokenize("[0]") == [HyList([HyInteger(0)])]
    assert tokenize("[0 #_1]") == [HyList([HyInteger(0)])]
    assert tokenize("[0 #_1 #_2]") == [HyList([HyInteger(0)])]
    assert tokenize("[2]") == [HyList([HyInteger(2)])]
    assert tokenize("[#_1 2]") == [HyList([HyInteger(2)])]
    assert tokenize("[#_0 #_1 2]") == [HyList([HyInteger(2)])]
    assert tokenize("[#_ #_0 1 2]") == [HyList([HyInteger(2)])]
    # in HySet
    assert tokenize("#{}") == [HySet()]
    assert tokenize("#{#_1}") == [HySet()]
    assert tokenize("#{0 #_1}") == [HySet([HyInteger(0)])]
    assert tokenize("#{#_1 0}") == [HySet([HyInteger(0)])]
    # in HyDict
    assert tokenize("{}") == [HyDict()]
    assert tokenize("{#_1}") == [HyDict()]
    assert tokenize("{#_0 1 2}") == [HyDict([HyInteger(1), HyInteger(2)])]
    assert tokenize("{1 #_0 2}") == [HyDict([HyInteger(1), HyInteger(2)])]
    assert tokenize("{1 2 #_0}") == [HyDict([HyInteger(1), HyInteger(2)])]
    # in HyExpression
    assert tokenize("()") == [HyExpression()]
    assert tokenize("(#_foo)") == [HyExpression()]
    assert tokenize("(#_foo bar)") == [HyExpression([HySymbol("bar")])]
    assert tokenize("(foo #_bar)") == [HyExpression([HySymbol("foo")])]
    assert tokenize("(foo :bar 1)") == [
        HyExpression([HySymbol("foo"),
                      HyKeyword(":bar"),
                      HyInteger(1)])
    ]
    assert tokenize("(foo #_:bar 1)") == [
        HyExpression([HySymbol("foo"), HyInteger(1)])
    ]
    assert tokenize("(foo :bar #_1)") == [
        HyExpression([HySymbol("foo"), HyKeyword(":bar")])
    ]
    # discard term with nesting
    assert tokenize("[1 2 #_[a b c [d e [f g] h]] 3 4]") == [
        HyList([HyInteger(1),
                HyInteger(2),
                HyInteger(3),
                HyInteger(4)])
    ]
    # discard with other prefix syntax
    assert tokenize("a #_'b c") == [HySymbol("a"), HySymbol("c")]
    assert tokenize("a '#_b c") == [
        HySymbol("a"),
        HyExpression([HySymbol("quote"), HySymbol("c")])
    ]
    assert tokenize("a '#_b #_c d") == [
        HySymbol("a"),
        HyExpression([HySymbol("quote"), HySymbol("d")])
    ]
    assert tokenize("a '#_ #_b c d") == [
        HySymbol("a"),
        HyExpression([HySymbol("quote"), HySymbol("d")])
    ]
Example #29
def test_lex_comment_382():
    """Ensure that we can tokenize sources with a comment at the end"""
    entry = tokenize("foo ;bar\n;baz")
    assert entry == [HySymbol("foo")]
Example #30
    def f(x):
        return [HyExpression([HySymbol("foo"), x])]