Code example #1
def _test_expr(tree):
    # Note we want the line number *before macro expansion*, so we capture it now.
    ln = q[u[tree.lineno]] if hasattr(tree, "lineno") else q[None]
    filename = q[h[callsite_filename]()]
    asserter = q[h[unpythonic_assert]]

    # test[expr, message]  (like assert expr, message)
    if type(tree) is Tuple and len(tree.elts) == 2:
        tree, message = tree.elts
    # test[expr]  (like assert expr)
    else:
        message = q[None]

    # Before we edit the tree, get the source code in its pre-transformation
    # state, so we can include that into the test failure message.
    #
    # We capture the source in the outside-in pass, so that no macros inside `tree`
    # are expanded yet. For the same reason, we process the `the[]` marks in the
    # outside-in pass.
    #
    # (Note, however, that if the `test[]` is nested within the invocation of
    #  a code-walking block macro, that macro may have performed edits already.
    #  For this reason, we provide `with expand_testing_macros_first`, which
    #  in itself is a code-walking block macro, whose only purpose is to force
    #  `test[]` and its sisters to expand first.)
    sourcecode = unparse(tree)

    envname = gensym("e")  # for injecting the captured value

    # Handle the `the[...]` marks, if any.
    tree, the_exprs = _transform_important_subexpr(tree, envname=envname)
    if not the_exprs and type(tree) is Compare:  # inject the implicit the[] on the LHS
        tree.left = _inject_value_recorder(envname, tree.left)

    # We delay the execution of the test expr using a lambda, so
    # `unpythonic_assert` can get control first before the expr runs.
    #
    # Also, we need the lambda anyway, to pass in the value-capture environment
    # for the `the[]` mark.
    #
    # We can't inject `lazy[]` here (which would make it more explicit that this
    # is a delay operation), because we need to pass in the environment.
    #
    # We name the lambda `testexpr` to make the stack trace more understandable.
    # If you change the name, change it also in `unpythonic_assert`.
    thelambda = q[lambda _: a[tree]]
    thelambda.args.args[0] = arg(arg=envname)  # inject the gensymmed parameter name
    func_tree = q[h[namelambda]("testexpr")(a[thelambda])]  # create the function that takes in the env

    return q[(a[asserter])(u[sourcecode],
                           a[func_tree],
                           filename=a[filename],
                           lineno=a[ln],
                           message=a[message])]
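
For orientation, a usage sketch of the expression form this transformer implements. The syntax shapes (`test[expr]`, `test[expr, message]`, explicit `the[...]` marks) come from the comments above; the import line is an assumption about the user-facing API, and the snippet only does anything when the module is processed by the macro expander.

from unpythonic.syntax import macros, test, the  # assumed import path

x = 4
test[x < 5]                          # like `assert x < 5`, but reported through the test framework
test[x < 5, "x should stay small"]   # like `assert x < 5, "..."`
test[the[2 * x] < 10]                # explicitly mark which subexpression's value to capture on failure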
Code example #2
File: letdo.py Project: Technologicat/unpythonic
def _let_decorator_impl(bindings, body, mode, kind):
    assert mode in ("let", "letrec")
    assert kind in ("decorate", "call")
    if type(body) not in (FunctionDef, AsyncFunctionDef):
        raise SyntaxError("Expected a function definition to decorate")  # pragma: no cover
    body = dyn._macro_expander.visit_recursively(body)
    if not bindings:
        # As noted above, this cannot be triggered from the macro layer, no
        # matter what that layer does. This is here to optimize away a `dlet`
        # with no bindings, when used directly from other syntax transformers.
        return body  # pragma: no cover
    bindings = dyn._macro_expander.visit_recursively(bindings)

    names, values = zip(*[b.elts for b in bindings])  # --> (k1, ..., kn), (v1, ..., vn)
    names = [getname(k, accept_attr=False) for k in names]  # any duplicates will be caught by env at run-time

    e = gensym("e")
    envset = q[n[f"{e}.set"]]

    transform1 = partial(_letlike_transform,
                         envname=e,
                         lhsnames=names,
                         rhsnames=names,
                         setter=envset)
    transform2 = partial(transform1, dowrap=False)
    if mode == "letrec":
        values = [transform1(rhs) for rhs in values]
        values = [
            q[h[namelambda](u[f"letrec_binding{j}_{lhs}"])(a[rhs])]
            for j, (lhs, rhs) in enumerate(zip(names, values), start=1)
        ]
    body = transform2(body)

    # We place the let decorator in the innermost position. Hopefully this is ok.
    # (unpythonic.syntax.util.suggest_decorator_index can't help us here,
    #  since "let" is not one of the registered decorators)
    letter = dletf if kind == "decorate" else bletf
    bindings = [q[(u[k], a[v])] for k, v in zip(names, values)]
    # CAUTION: letdoutil.py relies on:
    #  - the literal name "letter" to detect expanded let forms
    #  - the "mode" kwarg to detect let/letrec mode
    #  - the presence of an "_envname" kwarg to detect this tree represents a let-decorator (vs. a let-expr),
    #    seeing only the Call node
    #  - the exact AST structure, for the views
    body.decorator_list = body.decorator_list + [
        q[h[letter](a[Tuple(elts=bindings)], mode=u[mode], _envname=u[e])]
    ]
    body.args.kwonlyargs = body.args.kwonlyargs + [arg(arg=e)]
    body.args.kw_defaults = body.args.kw_defaults + [None]
    return body
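
A usage sketch of the decorator forms this transformer backs, following the `@dlet[x << 1]` shape shown in the `dletseq` comments later in this listing; the import line is an assumption about the user-facing API. With kind="decorate" (`dlet`) the decorated function stays a function; with kind="call" (`blet`) it is called immediately, so the name is bound to its return value.

from unpythonic.syntax import macros, dlet, blet  # assumed import path

@dlet[x << 21]
def g():
    return x * 2        # `x` resolves against the env passed via the injected kwonly arg

@blet[x << 21]
def result():
    return x * 2        # `result` is 42, because blet calls the decorated function immediately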
Code example #3
File: letdo.py Project: Technologicat/unpythonic
def _let_expr_impl(bindings, body, mode):
    """bindings: sequence of ast.Tuple: (k1, v1), (k2, v2), ..., (kn, vn)"""
    assert mode in ("let", "letrec")

    # The let constructs are currently inside-out macros; expand other macro
    # invocations in both bindings and body.
    #
    # But apply the implicit `do` (extra bracket syntax) first.
    # (It is important we expand at least that immediately after, to resolve its local variables,
    #  because those may have the same lexical names as some of the let-bindings.)
    body = _implicit_do(body)
    body = dyn._macro_expander.visit_recursively(body)
    if not bindings:
        # Optimize out a `let` with no bindings. The macro layer cannot trigger
        # this case, because our syntaxes always require at least one binding.
        # So this check is here just to protect against use with no bindings directly
        # from other syntax transformers, which in theory could attempt anything.
        return body  # pragma: no cover
    bindings = dyn._macro_expander.visit_recursively(bindings)

    names, values = zip(*[b.elts for b in bindings])  # --> (k1, ..., kn), (v1, ..., vn)
    names = [getname(k, accept_attr=False) for k in names]  # any duplicates will be caught by env at run-time

    e = gensym("e")
    envset = q[n[f"{e}.set"]]

    transform = partial(_letlike_transform,
                        envname=e,
                        lhsnames=names,
                        rhsnames=names,
                        setter=envset)
    if mode == "letrec":
        values = [transform(rhs) for rhs in values]  # RHSs of bindings
        values = [
            q[h[namelambda](u[f"letrec_binding{j}_{lhs}"])(a[rhs])]
            for j, (lhs, rhs) in enumerate(zip(names, values), start=1)
        ]
    body = transform(body)
    body = q[h[namelambda](u[f"{mode}_body"])(a[body])]

    # CAUTION: letdoutil.py relies on:
    #  - the literal name "letter" to detect expanded let forms
    #  - the "mode" kwarg to detect let/letrec mode
    #  - the absence of an "_envname" kwarg to detect this tree represents a let-expr (vs. a let-decorator),
    #    seeing only the Call node
    #  - the exact AST structure, for the views
    letter = letf
    bindings = [q[(u[k], a[v])] for k, v in zip(names, values)]
    newtree = q[h[letter](t[bindings], a[body], mode=u[mode])]
    return newtree
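
To make the generated runtime shape concrete, here is a plain-Python toy (no macros; `_ToyEnv` and `toy_letrec` are hypothetical stand-ins) of the pattern the letrec branch emits: every binding's right-hand side and the body are delayed behind single-argument functions that receive the shared environment, so the bindings can refer to each other.

class _ToyEnv:
    pass

def toy_letrec(bindings, body):
    e = _ToyEnv()
    for name, rhs_fn in bindings:   # evaluate the RHSs in order, each against the shared env
        setattr(e, name, rhs_fn(e))
    return body(e)                  # the body is delayed the same way

# Mutually recursive bindings, roughly what a letrec expression sets up:
result = toy_letrec(
    [("evens", lambda e: (lambda n: n == 0 or e.odds(n - 1))),
     ("odds",  lambda e: (lambda n: n != 0 and e.evens(n - 1)))],
    lambda e: e.evens(10))
assert result is True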
Code example #4
 def transform(self, tree):
     if is_captured_value(tree):
         return tree  # don't recurse!
     # Don't recurse into nested `f[]`.
     # TODO: This would benefit from macro destructuring in the expander.
     # TODO: See https://github.com/Technologicat/mcpyrate/issues/3
     if type(tree) is Subscript and type(tree.value) is Name and tree.value.id in mynames:
         return tree
     elif type(tree) is Name and tree.id == "_":
         name = gensym("_")
         tree.id = name
         self.collect(name)
     return self.generic_visit(tree)
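
For context, this collector implements the placeholder half of a quicklambda-style transform: each bare `_` is renamed to a fresh gensym and collected, so the caller can wrap the whole expression in a lambda taking those gensyms as parameters. Assuming the macro is bound to the name `f` (as the comment above implies), the intended effect is roughly:

f[_ * _]       # --> lambda _1, _2: _1 * _2   (the parameter names are actually gensymmed)
f[g(_, 2)]     # --> lambda _1: g(_1, 2)      (nested f[] invocations are left untouched)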
Code example #5
File: autoref.py Project: Technologicat/unpythonic
    def makeautoreference(tree):
        # We don't need to care about `Done` markers from expanded `@namemacro`s
        # because the transformer that calls this function recurses into them.
        assert type(tree) is Name and (type(tree.ctx) is Load or not tree.ctx)
        newtree = q[(lambda __ar_: __ar_[1] if __ar_[0] else a[tree])(h[_autoref_resolve]((n[o], u[tree.id])))]
        our_lambda_argname = gensym("_ar")

        # TODO: could we use `mcpyrate.utils.rename` here?
        class PlaceholderRenamer(ASTTransformer):
            def transform(self, tree):
                if is_captured_value(tree):
                    return tree  # don't recurse!
                if type(tree) is Name and tree.id == "__ar_":
                    tree.id = our_lambda_argname
                elif type(tree) is arg and tree.arg == "__ar_":
                    tree.arg = our_lambda_argname
                return self.generic_visit(tree)
        return PlaceholderRenamer().visit(newtree)
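
A plain-Python toy (the `toy_resolve` helper is hypothetical, standing in for the real `_autoref_resolve`) showing the runtime shape of the expression built by the quasiquote above: the resolver returns a `(found, value)` pair, and the wrapping lambda either uses the looked-up value or falls back to the original bare name.

def toy_resolve(args):
    *objs, name = args                       # e.g. (o, "x"): look for attribute "x" on o
    for obj in objs:
        if hasattr(obj, name):
            return (True, getattr(obj, name))
    return (False, None)

class Box:
    x = 42

o = Box()
x = "fallback"
value = (lambda _ar: _ar[1] if _ar[0] else x)(toy_resolve((o, "x")))
assert value == 42                           # found on `o`, so the lexical `x` is not used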
Code example #6
def _test_block_signals_or_raises(block_body, args, syntaxname, asserter):
    if not block_body:
        return []  # pragma: no cover, cannot happen through the public API.
    first_stmt = block_body[0]

    # Note we want the line number *before macro expansion*, so we capture it now.
    ln = q[u[first_stmt.lineno]] if hasattr(first_stmt, "lineno") else q[None]
    filename = q[h[callsite_filename]()]

    # with test_raises[exctype, message]:
    if len(args) == 2:
        exctype, message = args
    # with test_raises[exctype]:
    elif len(args) == 1:
        exctype = args[0]
        message = q[None]
    else:
        raise SyntaxError(
            f'Expected `with {syntaxname}[exctype]:` or `with {syntaxname}[exctype, message]:`'
        )  # pragma: no cover

    # Same remark about outside-in source code capture as in `_test_expr`.
    sourcecode = unparse(block_body)

    testblock_function_name = gensym("_test_block")
    thetest = q[(a[asserter])(a[exctype],
                              u[sourcecode],
                              n[testblock_function_name],
                              filename=a[filename],
                              lineno=a[ln],
                              message=a[message])]
    with q as newbody:

        def _insert_funcname_here_():  # no env needed, since `the[]` is not meaningful here.
            ...

        a[thetest]
    thefunc = newbody[0]
    thefunc.name = testblock_function_name
    thefunc.body = block_body
    return newbody
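
A usage sketch of the block form handled here, following the `with test_raises[...]` shapes from the comments above; the import line is an assumption about the user-facing API.

from unpythonic.syntax import macros, test_raises  # assumed import path

with test_raises[ValueError]:
    int("not a number")

with test_raises[ValueError, "int() should reject non-numeric input"]:
    int("not a number")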
Code example #7
File: autoref.py Project: Technologicat/unpythonic
def _autoref(block_body, args, asname):
    # first pass, outside-in
    if len(args) != 1:
        raise SyntaxError("expected exactly one argument, the expr to implicitly reference")  # pragma: no cover
    if not block_body:
        raise SyntaxError("expected at least one statement inside the 'with autoref' block")  # pragma: no cover

    block_body = dyn._macro_expander.visit_recursively(block_body)

    # second pass, inside-out

    # `autoref`'s analyzer needs the `ctx` attributes in `tree` to be filled in correctly.
    block_body = fix_ctx(block_body, copy_seen_nodes=False)  # TODO: or maybe copy seen nodes?

    o = asname.id if asname else gensym("_o")  # Python itself guarantees asname to be a bare Name.

    # (lambda _ar314: _ar314[1] if _ar314[0] else x)(_autoref_resolve((p, o, "x")))
    def isautoreference(tree):
        return (type(tree) is Call and
                len(tree.args) == 1 and type(tree.args[0]) is Call and
                isx(tree.args[0].func, "_autoref_resolve") and
                type(tree.func) is Lambda and len(tree.func.args.args) == 1 and
                tree.func.args.args[0].arg.startswith("_ar"))
    def get_resolver_list(tree):  # (p, o, "x")
        return tree.args[0].args[0].elts
    def add_to_resolver_list(tree, objnode):
        lst = get_resolver_list(tree)
        lst.insert(-1, objnode)

    # x --> the autoref code above.
    def makeautoreference(tree):
        # We don't need to care about `Done` markers from expanded `@namemacro`s
        # because the transformer that calls this function recurses into them.
        assert type(tree) is Name and (type(tree.ctx) is Load or not tree.ctx)
        newtree = q[(lambda __ar_: __ar_[1] if __ar_[0] else a[tree])(h[_autoref_resolve]((n[o], u[tree.id])))]
        our_lambda_argname = gensym("_ar")

        # TODO: could we use `mcpyrate.utils.rename` here?
        class PlaceholderRenamer(ASTTransformer):
            def transform(self, tree):
                if is_captured_value(tree):
                    return tree  # don't recurse!
                if type(tree) is Name and tree.id == "__ar_":
                    tree.id = our_lambda_argname
                elif type(tree) is arg and tree.arg == "__ar_":
                    tree.arg = our_lambda_argname
                return self.generic_visit(tree)
        return PlaceholderRenamer().visit(newtree)

    class AutorefTransformer(ASTTransformer):
        def transform(self, tree):
            if is_captured_value(tree):
                return tree  # don't recurse!

            referents = self.state.referents
            if type(tree) in (Attribute, Subscript, Name) and type(tree.ctx) in (Store, Del):
                return tree
            # skip autoref lookup for let/do envs
            elif islet(tree):
                view = ExpandedLetView(tree)
                self.generic_withstate(tree, referents=referents + [view.body.args.args[0].arg])  # lambda e14: ...
            elif isdo(tree):
                view = ExpandedDoView(tree)
                self.generic_withstate(tree, referents=referents + [view.body[0].args.args[0].arg])  # lambda e14: ...
            elif isinstance(tree, ExpandedAutorefMarker):
                self.generic_withstate(tree, referents=referents + [tree.varname])
            elif isautoreference(tree):  # generated by an inner already expanded autoref block
                thename = getconstant(get_resolver_list(tree)[-1])
                if thename in referents:
                    # This case is tricky to trigger, so let's document it here. This code:
                    #
                    # with autoref[e]:
                    #     with autoref[e2]:
                    #         e
                    #
                    # expands to:
                    #
                    # $ASTMarker<ExpandedAutorefMarker>:
                    #     varname: '_o5'
                    #     body:
                    #         _o5 = e
                    #         $ASTMarker<ExpandedAutorefMarker>:
                    #             varname: '_o4'
                    #             body:
                    #                 _o4 = (lambda _ar13: (_ar13[1] if _ar13[0] else e2))(_autoref_resolve((_o5, 'e2')))
                    #                 (lambda _ar9: (_ar9[1] if _ar9[0] else e))(_autoref_resolve((_o4, _o5, 'e')))
                    #
                    # so there's no "e" as referent; the actual referent has a gensymmed name.
                    # Inside the body of the inner autoref, looking up "e" in e2 before falling
                    # back to the outer "e" is exactly what `autoref` is expected to do.
                    #
                    # Where is this used, then? The named variant `with autoref[...] as ...`:
                    #
                    # with step_expansion:
                    #     with autoref[e] as outer:
                    #         with autoref[e2] as inner:
                    #             outer
                    #
                    # expands to:
                    #
                    # $ASTMarker<ExpandedAutorefMarker>:
                    #     varname: 'outer'
                    #     body:
                    #         outer = e
                    #         $ASTMarker<ExpandedAutorefMarker>:
                    #             varname: 'inner'
                    #             body:
                    #                 inner = (lambda _ar17: (_ar17[1] if _ar17[0] else e2))(_autoref_resolve((outer, 'e2')))
                    #                 outer  # <-- !!!
                    #
                    # Now this case is triggered; we get a bare `outer` inside the inner body.
                    # TODO: Whether this wart is a good idea is another question...

                    # remove autoref lookup for an outer referent, inserted early by an inner autoref block
                    # (that doesn't know that any outer block exists)
                    tree = q[n[thename]]  # (lambda ...)(_autoref_resolve((p, "o"))) --> o
                else:
                    add_to_resolver_list(tree, q[n[o]])  # _autoref_resolve((p, "x")) --> _autoref_resolve((p, o, "x"))
                return tree
            elif isinstance(tree, ExpandedAutorefMarker):  # nested autorefs
                return tree
            elif type(tree) is Name and (type(tree.ctx) is Load or not tree.ctx) and tree.id not in referents:
                tree = makeautoreference(tree)
                return tree
            # Attribute works as-is, because a.b.c --> Attribute(Attribute(a, "b"), "c"), so Name "a" gets transformed.
            # Subscript similarly, a[1][2] --> Subscript(Subscript(a, 1), 2), so Name "a" gets transformed.
            return self.generic_visit(tree)

    # Skip (by name) some common references inserted by other macros.
    #
    # This part runs in the inside-out pass, so any outside-in macro invocations,
    # as well as any inside-out macro invocations inside the `with autoref`
    # block, have already expanded by the time we run our transformer.
    always_skip = ['letter', 'dof',  # let/do subsystem
                   'namelambda',  # lambdatools subsystem
                   'curry', 'curryf', 'currycall',  # autocurry subsystem
                   'lazy', 'lazyrec', 'maybe_force_args',  # lazify subsystem
                   # the test framework subsystem
                   'callsite_filename', 'returns_normally'] + _test_function_names
    with q as newbody:
        n[o] = a[args[0]]
    for stmt in block_body:
        newbody.append(AutorefTransformer(referents=always_skip + [o]).visit(stmt))

    return ExpandedAutorefMarker(body=newbody, varname=o)
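
A usage sketch of the block macro implemented by this transformer, following the `with autoref[...]` and `with autoref[...] as name:` shapes shown in the comments above; the import line is an assumption about the user-facing API. Inside the block, bare name reads are first looked up as attributes of the given object, falling back to the surrounding scope when not found.

from unpythonic.syntax import macros, autoref  # assumed import path
from types import SimpleNamespace

env = SimpleNamespace(a=1, b=2)
c = 3
with autoref[env]:
    print(a, b, c)   # a and b resolve to env.a and env.b; c is not on env, so the lexical c is used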
Code example #8
def _test_block(block_body, args):
    if not block_body:
        return []  # pragma: no cover, cannot happen through the public API.
    first_stmt = block_body[0]

    # Note we want the line number *before macro expansion*, so we capture it now.
    ln = q[u[first_stmt.lineno]] if hasattr(first_stmt, "lineno") else q[None]
    filename = q[h[callsite_filename]()]
    asserter = q[h[unpythonic_assert]]

    # with test[message]:
    if len(args) == 1:
        message = args[0]
    # with test:
    elif len(args) == 0:
        message = q[None]
    else:
        raise SyntaxError('Expected `with test:` or `with test[message]:`')  # pragma: no cover

    # Same remark about outside-in source code capture as in `_test_expr`.
    sourcecode = unparse(block_body)

    envname = gensym("e")  # for injecting the captured value

    # Handle the `the[...]` marks, if any.
    block_body, the_exprs = _transform_important_subexpr(block_body,
                                                         envname=envname)

    # Prepare the function template to be injected, and splice the contents
    # of the `with test` block as the function body.
    testblock_function_name = gensym("_test_block")
    thetest = q[(a[asserter])(u[sourcecode],
                              n[testblock_function_name],
                              filename=a[filename],
                              lineno=a[ln],
                              message=a[message])]
    with q as newbody:

        def _insert_funcname_here_(_insert_envname_here_):
            ...  # to be filled in below

        a[thetest]  # call the asserter
    thefunc = newbody[0]
    thefunc.name = testblock_function_name
    thefunc.args.args[0] = arg(arg=envname)  # inject the gensymmed parameter name
    thefunc.body = block_body

    # Handle the return statement.
    #
    # We just check if there is at least one; if so, we don't need to do
    # anything; the returned value is what the test should return to the
    # asserter.
    for stmt in thefunc.body:
        if type(stmt) is Return:
            retval = stmt.value
            if not the_exprs and type(retval) is Compare:
                # inject the implicit the[] on the LHS
                retval.left = _inject_value_recorder(envname, retval.left)
            break
    else:
        # When there is no return statement at the top level of the `with test` block,
        # we inject a `return True` to satisfy the test when the injected function
        # returns normally.
        with q as thereturn:
            return True
        thefunc.body.extend(thereturn)

    return newbody
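
A usage sketch of the block form handled here, following the `with test:` / `with test[message]:` shapes from the comments above; the import line is an assumption about the user-facing API. The statements in the block become the body of the injected function, so a top-level `return` inside the block supplies the value to assert on; when there is none, the injected `return True` makes a normally completing block count as a pass.

from unpythonic.syntax import macros, test  # assumed import path

with test["the setup code should produce a small total"]:
    xs = [1, 2, 3]
    total = sum(xs)
    return total < 10   # the block is moved into a function, so this `return` is legal after expansion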
Code example #9
File: letdo.py Project: Technologicat/unpythonic
def _do(tree):
    if type(tree) not in (Tuple, List):
        raise SyntaxError(
            "do body: expected a sequence of comma-separated expressions"
        )  # pragma: no cover, let's not test the macro expansion errors.

    e = gensym("e")
    envset = q[n[f"{e}._set"]]  # use internal _set to allow new definitions
    envdel = q[n[f"{e}.pop"]]

    islocaldef = partial(is_unexpanded_expr_macro, local, dyn._macro_expander)
    isdelete = partial(is_unexpanded_expr_macro, delete, dyn._macro_expander)

    def transform_localdefs(tree):
        class LocaldefCollector(ASTTransformer):
            def transform(self, tree):
                if is_captured_value(tree):
                    return tree  # don't recurse!
                expr = islocaldef(tree)
                if expr:
                    if not isenvassign(expr):
                        raise SyntaxError(
                            "local[...] takes exactly one expression of the form 'name << value'"
                        )  # pragma: no cover
                    view = UnexpandedEnvAssignView(expr)
                    self.collect(view.name)
                    view.value = self.visit(view.value)  # nested local[] (e.g. from `do0[local[y << 5],]`)
                    return expr  # `local[x << 21]` --> `x << 21`; compiling *that* makes the env-assignment occur.
                return tree  # don't recurse!

        c = LocaldefCollector()
        tree = c.visit(tree)
        return tree, c.collected

    def transform_deletes(tree):
        class DeleteCollector(ASTTransformer):
            def transform(self, tree):
                if is_captured_value(tree):
                    return tree  # don't recurse!
                expr = isdelete(tree)
                if expr:
                    if type(expr) is not Name:
                        raise SyntaxError("delete[...] takes exactly one name")  # pragma: no cover
                    self.collect(expr.id)
                    return q[a[envdel](u[expr.id])]  # `delete[x]` --> `e.pop('x')`
                return tree  # don't recurse!

        c = DeleteCollector()
        tree = c.visit(tree)
        return tree, c.collected

    def check_strays(ismatch, tree):
        class StrayHelperMacroChecker(ASTVisitor):  # TODO: refactor this?
            def examine(self, tree):
                if is_captured_value(tree):
                    return  # don't recurse!
                elif isdo(tree, expanded=False):
                    return  # don't recurse!
                elif ismatch(tree):
                    # Expand the stray helper macro invocation, to trigger its `SyntaxError`
                    # with a useful message, and *make the expander generate a use site traceback*.
                    #
                    # (If we just `raise` here directly, the expander won't see the use site
                    #  of the `local[]` or `delete[]`, but just that of the `do[]`.)
                    dyn._macro_expander.visit(tree)
                self.generic_visit(tree)

        StrayHelperMacroChecker().visit(tree)

    check_stray_localdefs = partial(check_strays, islocaldef)
    check_stray_deletes = partial(check_strays, isdelete)

    names = []
    lines = []
    for j, expr in enumerate(tree.elts, start=1):
        # Despite the recursion, this will not trigger false positives for nested do[] expressions,
        # because the transformers only operate at the top level of this do[].
        expr, newnames = transform_localdefs(expr)
        expr, deletednames = transform_deletes(expr)
        if newnames and deletednames:
            raise SyntaxError(
                "a do-item may have only local[] or delete[], not both"
            )  # pragma: no cover
        if newnames:
            if any(x in names for x in newnames):
                raise SyntaxError("local names must be unique in the same do")  # pragma: no cover

        # Before transforming any further, check that there are no local[] or delete[] further in, where
        # they don't belong. This allows the error message to show the *untransformed* source code for
        # the erroneous invocation. These checkers respect the boundaries of any nested do[].
        check_stray_localdefs(expr)
        check_stray_deletes(expr)

        # The envassignment transform (LHS) needs the updated bindings, whereas
        # the name transform (RHS) should use the previous bindings, so that any
        # changes to bindings take effect starting from the **next** do-item.
        updated_names = [x for x in names + newnames if x not in deletednames]
        expr = _letlike_transform(expr,
                                  e,
                                  lhsnames=updated_names,
                                  rhsnames=names,
                                  setter=envset)
        expr = q[h[namelambda](u[f"do_line{j}"])(a[expr])]
        names = updated_names
        lines.append(expr)
    # CAUTION: letdoutil.py depends on the literal name "dof" to detect expanded do forms.
    # Also, the views depend on the exact AST structure.
    # AST-unquoting a `list` of ASTs in the arguments position of a quasiquoted call
    # unpacks it into positional arguments.
    thecall = q[h[dof](a[lines])]
    return thecall
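
A usage sketch of the expression form this transformer implements, using the `local[name << value]` and `delete[name]` helper shapes referenced in the comments above; the import line is an assumption about the user-facing API. Each do-item runs in sequence against a shared environment, and the value of the last item becomes the value of the whole `do[]`.

from unpythonic.syntax import macros, do, local  # assumed import path

y = do[local[x << 21],    # define a local name in the do-environment
       x << 2 * x,        # rebind it with an env-assignment
       x + 1]             # the last item is the value of the whole do[]
assert y == 43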
Code example #10
File: letdo.py Project: Technologicat/unpythonic
def _dletseq_impl(bindings, body, kind):
    # What we want:
    #
    # @dletseq[x << 1,
    #          x << x + 1,
    #          x << x + 2]
    # def g(*args, **kwargs):
    #     return x
    # assert g() == 4
    #
    # -->
    #
    # @dlet[x << 1]
    # def g(*args, **kwargs, e1):  # original args from tree go to the outermost def
    #   @dlet[x << x + 1]          # on RHS, important for e1.x to be in scope
    #   def g2(*, e2):
    #       @dlet[x << x + 2]
    #       def g3(*, e3):         # expansion proceeds from inside out
    #           return e3.x        # original args travel here by the closure property
    #       return g3()
    #   return g2()
    # assert g() == 4
    #
    assert kind in ("decorate", "call")
    if type(body) not in (FunctionDef, AsyncFunctionDef):
        raise SyntaxError(
            "Expected a function definition to decorate")  # pragma: no cover
    if not bindings:
        # As noted above, this cannot be triggered from the macro layer, no
        # matter what that layer does. This is here to optimize away a `dletseq`
        # with no bindings, when used directly from other syntax transformers.
        return body  # pragma: no cover

    userargs = body.args  # original arguments to the def
    fname = body.name
    noargs = arguments(args=[],
                       kwonlyargs=[],
                       vararg=None,
                       kwarg=None,
                       defaults=[],
                       kw_defaults=[])
    if sys.version_info >= (3, 8, 0):  # Python 3.8+: positional-only arguments
        noargs.posonlyargs = []
    iname = gensym(f"{fname}_inner")
    body.args = noargs
    body.name = iname

    *rest, last = bindings
    dletter = _dlet if kind == "decorate" else _blet
    innerdef = dletter([last], body)

    # optimization: in the final step, no need to generate a wrapper function
    if not rest:
        tmpargs = innerdef.args
        innerdef.name = fname
        innerdef.args = userargs
        # copy the env arg
        innerdef.args.kwonlyargs += tmpargs.kwonlyargs
        innerdef.args.kw_defaults += tmpargs.kw_defaults
        return innerdef

    # If kind=="decorate", the outer function needs to call the inner one
    # after defining it.
    # If kind=="call", then, after innerdef completes, the inner function has
    # already been replaced by its return value.
    ret = Return(value=q[n[iname]()]) if kind == "decorate" else Return(value=q[n[iname]])
    outer = FunctionDef(name=fname,
                        args=userargs,
                        body=[innerdef, ret],
                        decorator_list=[],
                        returns=None)  # no return type annotation
    return _dletseq_impl(rest, outer, kind)
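
A plain-Python analogue (no macros, no env objects) of the expansion sketched in the comment at the top of this function: each binding adds one more nested function layer, the original arguments stay on the outermost def, and the inner layers see the earlier bindings through the closure.

def g(*args, **kwargs):     # corresponds to @dletseq[x << 1, x << x + 1, x << x + 2]
    x = 1
    def g2():
        x2 = x + 1
        def g3():
            x3 = x2 + 2
            return x3       # the original body, with `x` now meaning the innermost binding
        return g3()
    return g2()

assert g() == 4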
Code example #11
        def transform(self, tree):
            if is_captured_value(tree):
                return tree  # don't recurse!

            bindings = self.state.bindings
            enames = self.state.enames

            def isourupdate(thecall):
                if type(thecall.func) is not Attribute:
                    return False
                return thecall.func.attr == "update" and any(
                    isx(thecall.func.value, x) for x in enames)

            if isfunctionoruserlambda(tree):
                argnames = getargs(tree)
                if argnames:
                    # prepend env init to function body, update bindings
                    kws = [keyword(arg=k, value=q[n[k]])
                           for k in argnames]  # "x" --> x
                    newbindings = bindings.copy()
                    if type(tree) in (FunctionDef, AsyncFunctionDef):
                        ename = gensym("e")
                        theenv = q[h[_envify]()]
                        theenv.keywords = kws
                        with q as quoted:
                            n[ename] = a[theenv]
                        assignment = quoted[0]
                        tree.body.insert(0, assignment)
                    elif type(tree) is Lambda and id(tree) in userlambdas:
                        # We must in general inject a new do[] even if one is already there,
                        # due to scoping rules. If the user code writes to the same names in
                        # its do[] env, this shadows the formals; if it then pops one of its names,
                        # the name should revert to mean the formal parameter.
                        #
                        # inject a do[] and reuse its env
                        tree.body = _do(q[n["_here_"], a[tree.body]])
                        view = ExpandedDoView(tree.body)  # view.body: [(lambda e14: ...), ...]
                        ename = view.body[0].args.args[0].arg  # do[] environment name
                        theupdate = q[n[f"{ename}.update"]]
                        thecall = q[a[theupdate]()]
                        thecall.keywords = kws
                        tree.body = splice_expression(thecall, tree.body,
                                                      "_here_")
                    newbindings.update({k: q[n[f"{ename}.{k}"]] for k in argnames})  # "x" --> e.x
                    self.generic_withstate(tree,
                                           enames=(enames + [ename]),
                                           bindings=newbindings)
            else:
                # leave alone the _envify() added by us
                if type(tree) is Call and (isx(tree.func, "_envify")
                                           or isourupdate(tree)):
                    # don't recurse
                    return tree
                # transform env-assignments into our envs
                elif isenvassign(tree):
                    view = UnexpandedEnvAssignView(tree)
                    if view.name in bindings.keys():
                        # Grab the envname from the actual binding of "varname", of the form `e.varname`
                        # (so it's the `id` of a `Name` that is the `value` of an `Attribute`).
                        envset = q[n[f"{bindings[view.name].value.id}.set"]]
                        newvalue = self.visit(view.value)
                        return q[a[envset](u[view.name], a[newvalue])]
                # transform references to currently active bindings
                # x --> e14.x
                # It doesn't matter if this hits an already expanded inner `with envify`,
                # because the gensymmed environment name won't be in our bindings, and the "x"
                # has become the `attr` in an `Attribute` node.
                elif type(tree) is Name and tree.id in bindings.keys():
                    # We must be careful to preserve the Load/Store/Del context of the name.
                    # The default lets `mcpyrate` fix it later.
                    ctx = tree.ctx if hasattr(tree, "ctx") else None
                    out = deepcopy(bindings[tree.id])
                    out.ctx = ctx
                    return out

            return self.generic_visit(tree)
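
A usage sketch of the block macro this transformer belongs to (the comments above refer to it as `with envify`); the import line is an assumption about the user-facing API. Inside the block, env-assignments of the form `name << value` can rebind function formals, which the transformer redirects through an injected environment, and reads of those names are rewritten to read from that environment.

from unpythonic.syntax import macros, envify  # assumed import path

with envify:
    def step(x):
        x << x + 1      # env-assignment rebinds the formal `x` via the injected env
        return x        # reads of `x` now go through that env

assert step(41) == 42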