Example #1
def find_predecessors(graph, pending_pred):
    """Return the set of variables whose content can end up inside one
    of the 'pending_pred', which is a list of (block, var) tuples.
    """
    entrymap = mkentrymap(graph)
    if len(entrymap[graph.startblock]) != 1:
        insert_empty_startblock(graph)
        entrymap = mkentrymap(graph)

    pred = set([v for block, v in pending_pred])

    def add(block, v):
        if isinstance(v, Variable):
            if v not in pred:
                pending_pred.append((block, v))
                pred.add(v)

    while pending_pred:
        block, v = pending_pred.pop()
        if v in block.inputargs:
            var_index = block.inputargs.index(v)
            for link in entrymap[block]:
                prevblock = link.prevblock
                if prevblock is not None:
                    add(prevblock, link.args[var_index])
        else:
            for op in block.operations:
                if op.result is v:
                    if is_trivial_rewrite(op):
                        add(block, op.args[0])
                    break
    return pred
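
Note: every example on this page relies on the same contract of mkentrymap(graph): it returns a dict mapping each block of the flow graph to the list of links entering it, with the start block additionally receiving a pseudo-link whose prevblock is None (which is why the code above checks 'prevblock is not None'). A rough sketch of that behaviour, for illustration only and assuming the Link/FunctionGraph interfaces from rpython.flowspace.model:

from rpython.flowspace.model import Link

def mkentrymap_sketch(graph):
    # Illustrative re-implementation, not the real mkentrymap: map every
    # block to the list of links that enter it.  The start block is seeded
    # with a pseudo-link whose prevblock is None.
    result = {graph.startblock: [Link(graph.getargs(), graph.startblock)]}
    for block in graph.iterblocks():
        for link in block.exits:
            result.setdefault(link.target, []).append(link)
    return result
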
Example #2
    def create_exception_handling(self, graph):
        """After an exception in a direct_call (or indirect_call), that is not caught
        by an explicit
        except statement, we need to reraise the exception. So after this
        direct_call we need to test if an exception had occurred. If so, we return
        from the current graph with a special value (False/-1/-1.0/null).
        Because of the added exitswitch we need an additional block.
        """
        if hasattr(graph, 'exceptiontransformed'):
            assert self.same_obj(self.exc_data_ptr, graph.exceptiontransformed)
            return
        else:
            self.raise_analyzer.analyze_direct_call(graph)
            graph.exceptiontransformed = self.exc_data_ptr

        join_blocks(graph)
        # collect the blocks before changing them
        n_need_exc_matching_blocks = 0
        n_gen_exc_checks           = 0
        #
        entrymap = mkentrymap(graph)
        if graph.exceptblock in entrymap:
            for link in entrymap[graph.exceptblock]:
                self.transform_jump_to_except_block(graph, entrymap, link)
        #
        for block in list(graph.iterblocks()):
            self.replace_fetch_restore_operations(block)
            need_exc_matching, gen_exc_checks = self.transform_block(graph, block)
            n_need_exc_matching_blocks += need_exc_matching
            n_gen_exc_checks           += gen_exc_checks
        cleanup_graph(graph)
        return n_need_exc_matching_blocks, n_gen_exc_checks
Example #3
def merge_if_blocks_once(graph):
    """Convert consecutive blocks that all compare a variable (of Primitive type)
    with a constant into one block with multiple exits. The backends can in
    turn output this block as a switch statement.
    """
    candidates = [block for block in graph.iterblocks()
                      if is_chain_block(block, first=True)]
    entrymap = mkentrymap(graph)
    for firstblock in candidates:
        chain = []
        checkvars = []
        varmap = {}  # {var in a block in the chain: var in the first block}
        for var in firstblock.exits[0].args:
            varmap[var] = var
        for var in firstblock.exits[1].args:
            varmap[var] = var
        def add_to_varmap(var, newvar):
            if isinstance(var, Variable):
                varmap[newvar] = varmap[var]
            else:
                varmap[newvar] = var
        current = firstblock
        while 1:
            # check whether the chain can be extended with the block that follows the
            # False link
            checkvar = [var for var in current.operations[-1].args
                           if isinstance(var, Variable)][0]
            resvar = current.operations[-1].result
            case = [var for var in current.operations[-1].args
                       if isinstance(var, Constant)][0]
            checkvars.append(checkvar)
            falseexit = current.exits[0]
            assert not falseexit.exitcase
            trueexit = current.exits[1]
            targetblock = falseexit.target
            # if the result of the check is also passed through the link, we
            # cannot construct the chain
            if resvar in falseexit.args or resvar in trueexit.args:
                break
            chain.append((current, case))
            if len(entrymap[targetblock]) != 1:
                break
            if checkvar not in falseexit.args:
                break
            newcheckvar = targetblock.inputargs[falseexit.args.index(checkvar)]
            if not is_chain_block(targetblock):
                break
            if newcheckvar not in targetblock.operations[0].args:
                break
            for i, var in enumerate(trueexit.args):
                add_to_varmap(var, trueexit.target.inputargs[i])
            for i, var in enumerate(falseexit.args):
                add_to_varmap(var, falseexit.target.inputargs[i])
            current = targetblock
        if len(chain) > 1:
            break
    else:
        return False
    merge_chain(chain, checkvars[0], varmap, graph)
    return True
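
For context, merge_if_blocks_once targets flow graphs produced by an if/elif chain that repeatedly compares one variable against constants. A hypothetical RPython function of that shape (illustration only, not taken from the sources above):

def classify(n):
    # Each comparison below becomes a block doing an equality check against
    # a constant; merge_if_blocks_once can collapse the chain into a single
    # block with one exit per case plus a default exit.
    if n == 0:
        return 'zero'
    elif n == 1:
        return 'one'
    elif n == 2:
        return 'two'
    else:
        return 'many'
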
Example #4
def test_find_initializing_stores_across_blocks():

    class A(object):
        pass
    class B(object):
        pass
    def f(x):
        a1 = A()
        a2 = A()
        a = A()
        b = B()
        b.a = a
        if x:
            b.b = a1
            b.c = a2
        else:
            b.c = a1
            b.b = a2
    t = rtype(f, [int])
    etrafo = ExceptionTransformer(t)
    graphs = etrafo.transform_completely()
    collect_analyzer = CollectAnalyzer(t)
    init_stores = find_initializing_stores(collect_analyzer, t.graphs[0],
                                           mkentrymap(t.graphs[0]))
    assert len(init_stores) == 5
Example #5
    def test_keep_all_keepalives(self):
        SIZE = llmemory.sizeof(lltype.Signed)
        PARRAY = lltype.Ptr(lltype.FixedSizeArray(lltype.Signed, 1))
        class A:
            def __init__(self):
                self.addr = llmemory.raw_malloc(SIZE)
            def __del__(self):
                llmemory.raw_free(self.addr)
        class B:
            pass
        def myfunc():
            b = B()
            b.keep = A()
            b.data = llmemory.cast_adr_to_ptr(b.keep.addr, PARRAY)
            b.data[0] = 42
            ptr = b.data
            # normally 'b' could go away as early as here, which would free
            # the memory held by the instance of A in b.keep...
            res = ptr[0]
            # ...so we explicitly keep 'b' alive until here
            objectmodel.keepalive_until_here(b)
            return res
        graph = self.check(myfunc, [], [], 42,
                           must_be_removed=False)    # 'A' instance left

        # there is a getarrayitem near the end of the graph of myfunc.
        # However, the memory it accesses must still be protected by the
        # following keepalive, even after malloc removal
        entrymap = mkentrymap(graph)
        [link] = entrymap[graph.returnblock]
        assert link.prevblock.operations[-1].opname == 'keepalive'
Example #6
def storesink_graph(graph):
    """ remove superfluous getfields. use a super-local method: all non-join
    blocks inherit the heap information from their (single) predecessor
    """
    added_some_same_as = False
    entrymap = mkentrymap(graph)

    # all merge blocks are starting points
    todo = [(block, None, None)
            for (block, prev_blocks) in entrymap.iteritems()
            if len(prev_blocks) > 1 or block is graph.startblock]

    visited = 0

    while todo:
        block, cache, inputlink = todo.pop()
        visited += 1
        if cache is None:
            cache = {}

        if block.operations:
            changed_block = _storesink_block(block, cache, inputlink)
            added_some_same_as = changed_block or added_some_same_as
        for link in block.exits:
            if len(entrymap[link.target]) == 1:
                new_cache = _translate_cache(cache, link)
                todo.append((link.target, new_cache, link))

    assert visited == len(entrymap)
    if added_some_same_as:
        removenoops.remove_same_as(graph)
        simplify.transform_dead_op_vars(graph)
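
As a rough illustration of what storesink_graph removes (hypothetical RPython source, not part of the code above): the second read of the same field below has no intervening operation that could change it, so the corresponding getfield can be replaced by a same_as of the first result:

class Point(object):
    def __init__(self, x):
        self.x = x

def read_twice(p):
    a = p.x    # getfield
    b = p.x    # superfluous getfield: becomes same_as(a) after store sinking
    return a + b
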
Example #7
def storesink_graph(graph):
    """ remove superfluous getfields. use a super-local method: all non-join
    blocks inherit the heap information from their (single) predecessor
    """
    added_some_same_as = False
    entrymap = mkentrymap(graph)

    # all merge blocks are starting points
    todo = [(block, None, None) for (block, prev_blocks) in entrymap.iteritems()
                if len(prev_blocks) > 1 or block is graph.startblock]

    visited = 0

    while todo:
        block, cache, inputlink = todo.pop()
        visited += 1
        if cache is None:
            cache = {}

        if block.operations:
            changed_block = _storesink_block(block, cache, inputlink)
            added_some_same_as = changed_block or added_some_same_as
        for link in block.exits:
            if len(entrymap[link.target]) == 1:
                new_cache = _translate_cache(cache, link)
                todo.append((link.target, new_cache, link))

    assert visited == len(entrymap)
    if added_some_same_as:
        removenoops.remove_same_as(graph)
        simplify.transform_dead_op_vars(graph)
Example #8
def remove_trivial_links(graph):
    """Remove trivial links by merging their source and target blocks

    A link is trivial if it has no arguments, is the single exit of its
    source and the single parent of its target.
    """
    entrymap = mkentrymap(graph)
    block = graph.startblock
    seen = set([block])
    stack = list(block.exits)
    while stack:
        link = stack.pop()
        if link.target in seen:
            continue
        source = link.prevblock
        target = link.target
        if (not link.args and source.exitswitch is None and
                len(entrymap[target]) == 1 and
                target.exits):  # stop at the returnblock
            assert len(source.exits) == 1
            source.operations.extend(target.operations)
            source.exitswitch = newexitswitch = target.exitswitch
            source.recloseblock(*target.exits)
            stack.extend(source.exits)
        else:
            seen.add(target)
            stack.extend(target.exits)
Example #9
def remove_trivial_links(graph):
    """Remove trivial links by merging their source and target blocks

    A link is trivial if it has no arguments, is the single exit of its
    source and the single parent of its target.
    """
    entrymap = mkentrymap(graph)
    block = graph.startblock
    seen = set([block])
    stack = list(block.exits)
    while stack:
        link = stack.pop()
        if link.target in seen:
            continue
        source = link.prevblock
        target = link.target
        if (not link.args and source.exitswitch is None
                and len(entrymap[target]) == 1
                and target.exits):  # stop at the returnblock
            assert len(source.exits) == 1
            source.operations.extend(target.operations)
            source.exitswitch = newexitswitch = target.exitswitch
            source.recloseblock(*target.exits)
            stack.extend(source.exits)
        else:
            seen.add(target)
            stack.extend(target.exits)
Example #10
    def __init__(self, graph):
        # Build a list of "unification opportunities": for each block and each
        # 'n', an "opportunity" groups the block's nth input variable with
        # the nth output variable from each of the incoming links, in a list:
        # [Block, blockvar, linkvar, linkvar, linkvar...]
        opportunities = []
        opportunities_with_const = []
        entrymap = mkentrymap(graph)
        del entrymap[graph.startblock]
        for block, links in entrymap.items():
            assert links
            for n, inputvar in enumerate(block.inputargs):
                vars = [block, inputvar]
                put_in = opportunities
                for link in links:
                    var = link.args[n]
                    if not isinstance(var, Variable):
                        put_in = opportunities_with_const
                    vars.append(var)
                # if any link provides a Constant, record this in
                # the opportunities_with_const list instead
                put_in.append(vars)
        self.opportunities = opportunities
        self.opportunities_with_const = opportunities_with_const
        self.variable_families = UnionFind()
Example #11
    def create_exception_handling(self, graph):
        """After an exception in a direct_call (or indirect_call), that is not caught
        by an explicit
        except statement, we need to reraise the exception. So after this
        direct_call we need to test if an exception had occurred. If so, we return
        from the current graph with a special value (False/-1/-1.0/null).
        Because of the added exitswitch we need an additional block.
        """
        if hasattr(graph, 'exceptiontransformed'):
            assert self.same_obj(self.exc_data_ptr, graph.exceptiontransformed)
            return
        else:
            self.raise_analyzer.analyze_direct_call(graph)
            graph.exceptiontransformed = self.exc_data_ptr

        join_blocks(graph)
        # collect the blocks before changing them
        n_need_exc_matching_blocks = 0
        n_gen_exc_checks = 0
        #
        entrymap = mkentrymap(graph)
        if graph.exceptblock in entrymap:
            for link in entrymap[graph.exceptblock]:
                self.transform_jump_to_except_block(graph, entrymap, link)
        #
        for block in list(graph.iterblocks()):
            self.replace_fetch_restore_operations(block)
            need_exc_matching, gen_exc_checks = self.transform_block(
                graph, block)
            n_need_exc_matching_blocks += need_exc_matching
            n_gen_exc_checks += gen_exc_checks
        cleanup_graph(graph)
        return n_need_exc_matching_blocks, n_gen_exc_checks
Example #12
    def cfunction_body(self):
        graph = self.graph

        # ----- for gc_enter_roots_frame
        _seen = set()
        for block in graph.iterblocks():
            for op in block.operations:
                if op.opname == 'gc_enter_roots_frame':
                    _seen.add(tuple(op.args))
        if _seen:
            assert len(_seen) == 1, (
                "multiple different gc_enter_roots_frame in %r" % (graph,))
            for line in self.gcpolicy.enter_roots_frame(self, list(_seen)[0]):
                yield line
        # ----- done

        # Locate blocks with a single predecessor, which can be written
        # inline in place of a "goto":
        entrymap = mkentrymap(graph)
        self.inlinable_blocks = {
            block for block in entrymap if len(entrymap[block]) == 1}

        yield ''
        for line in self.gen_goto(graph.startblock):
            yield line

        # Only blocks left are those that have more than one predecessor.
        for block in graph.iterblocks():
            if block in self.inlinable_blocks:
                continue
            for line in self.gen_block(block):
                yield line
Example #13
def remove_identical_vars(graph):
    """When the same variable is passed multiple times into the next block,
    pass it only once.  This enables further optimizations by the annotator,
    which otherwise doesn't realize that tests performed on one of the copies
    of the variable also affect the other."""

    # This algorithm is based on DataFlowFamilyBuilder, used as a
    # "phi node remover" (in the SSA sense).  'variable_families' is a
    # UnionFind object that groups variables by families; variables from the
    # same family can be identified, and if two input arguments of a block
    # end up in the same family, then we really remove one of them in favor
    # of the other.
    #
    # The idea is to identify as many variables as possible by trying
    # iteratively two kinds of phi node removal:
    #
    #  * "vertical", by identifying variables from different blocks, when
    #    we see that a value just flows unmodified into the next block without
    #    needing any merge (this is what backendopt.ssa.SSI_to_SSA() would do
    #    as well);
    #
    #  * "horizontal", by identifying two input variables of the same block,
    #    when these two variables' phi nodes have the same argument -- i.e.
    #    when for all possible incoming paths they would get twice the same
    #    value (this is really the purpose of remove_identical_vars()).
    #
    builder = DataFlowFamilyBuilder(graph)
    variable_families = builder.get_variable_families()  # vertical removal
    while True:
        if not builder.merge_identical_phi_nodes():  # horizontal removal
            break
        if not builder.complete():  # vertical removal
            break

    for block, links in mkentrymap(graph).items():
        if block is graph.startblock:
            continue
        renaming = {}
        family2blockvar = {}
        kills = []
        for i, v in enumerate(block.inputargs):
            v1 = variable_families.find_rep(v)
            if v1 in family2blockvar:
                # already seen -- this variable can be shared with the
                # previous one
                renaming[v] = family2blockvar[v1]
                kills.append(i)
            else:
                family2blockvar[v1] = v
        if renaming:
            block.renamevariables(renaming)
            # remove the now-duplicate input variables
            kills.reverse()  # starting from the end
            for i in kills:
                del block.inputargs[i]
                for link in links:
                    del link.args[i]
Example #14
def remove_identical_vars(graph):
    """When the same variable is passed multiple times into the next block,
    pass it only once.  This enables further optimizations by the annotator,
    which otherwise doesn't realize that tests performed on one of the copies
    of the variable also affect the other."""

    # This algorithm is based on DataFlowFamilyBuilder, used as a
    # "phi node remover" (in the SSA sense).  'variable_families' is a
    # UnionFind object that groups variables by families; variables from the
    # same family can be identified, and if two input arguments of a block
    # end up in the same family, then we really remove one of them in favor
    # of the other.
    #
    # The idea is to identify as many variables as possible by trying
    # iteratively two kinds of phi node removal:
    #
    #  * "vertical", by identifying variables from different blocks, when
    #    we see that a value just flows unmodified into the next block without
    #    needing any merge (this is what backendopt.ssa.SSI_to_SSA() would do
    #    as well);
    #
    #  * "horizontal", by identifying two input variables of the same block,
    #    when these two variables' phi nodes have the same argument -- i.e.
    #    when for all possible incoming paths they would get twice the same
    #    value (this is really the purpose of remove_identical_vars()).
    #
    builder = ssa.DataFlowFamilyBuilder(graph)
    variable_families = builder.get_variable_families()  # vertical removal
    while True:
        if not builder.merge_identical_phi_nodes():  # horizontal removal
            break
        if not builder.complete():  # vertical removal
            break

    for block, links in mkentrymap(graph).items():
        if block is graph.startblock:
            continue
        renaming = {}
        family2blockvar = {}
        kills = []
        for i, v in enumerate(block.inputargs):
            v1 = variable_families.find_rep(v)
            if v1 in family2blockvar:
                # already seen -- this variable can be shared with the
                # previous one
                renaming[v] = family2blockvar[v1]
                kills.append(i)
            else:
                family2blockvar[v1] = v
        if renaming:
            block.renamevariables(renaming)
            # remove the now-duplicate input variables
            kills.reverse()  # starting from the end
            for i in kills:
                del block.inputargs[i]
                for link in links:
                    del link.args[i]
Example #15
def remove_identical_vars_SSA(graph):
    """When the same variable is passed multiple times into the next block,
    pass it only once.  This enables further optimizations by the annotator,
    which otherwise doesn't realize that tests performed on one of the copies
    of the variable also affect the other."""
    uf = UnionFind(Representative)
    entrymap = mkentrymap(graph)
    del entrymap[graph.startblock]
    entrymap.pop(graph.returnblock, None)
    entrymap.pop(graph.exceptblock, None)
    inputs = {}
    for block, links in entrymap.items():
        phis = zip(block.inputargs, zip(*[link.args for link in links]))
        inputs[block] = phis

    def simplify_phis(block):
        phis = inputs[block]
        to_remove = []
        unique_phis = {}
        for i, (input, phi_args) in enumerate(phis):
            new_args = [uf.find_rep(arg) for arg in phi_args]
            if all_equal(new_args) and not isspecialvar(new_args[0]):
                uf.union(new_args[0], input)
                to_remove.append(i)
            else:
                t = tuple(new_args)
                if t in unique_phis:
                    uf.union(unique_phis[t], input)
                    to_remove.append(i)
                else:
                    unique_phis[t] = input
        for i in reversed(to_remove):
            del phis[i]
        return bool(to_remove)

    progress = True
    while progress:
        progress = False
        for block in inputs:
            if simplify_phis(block):
                progress = True

    renaming = dict((key, uf[key].rep) for key in uf)
    for block, links in entrymap.items():
        if inputs[block]:
            new_inputs, new_args = zip(*inputs[block])
            new_args = map(list, zip(*new_args))
        else:
            new_inputs = []
            new_args = [[] for _ in links]
        block.inputargs = new_inputs
        assert len(links) == len(new_args)
        for link, args in zip(links, new_args):
            link.args = args
    for block in entrymap:
        block.renamevariables(renaming)
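
To make the zip construction above concrete: for a block with input arguments [i0, i1] entered by two links carrying [a, b] and [c, d], 'phis' pairs each input variable with the tuple of values it may receive. A small stand-alone check with illustrative placeholder names:

inputargs = ['i0', 'i1']               # stands for block.inputargs
link_args = [['a', 'b'], ['c', 'd']]   # stands for [link.args for link in links]

phis = zip(inputargs, zip(*link_args))
assert list(phis) == [('i0', ('a', 'c')), ('i1', ('b', 'd'))]
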
Example #16
def remove_identical_vars_SSA(graph):
    """When the same variable is passed multiple times into the next block,
    pass it only once.  This enables further optimizations by the annotator,
    which otherwise doesn't realize that tests performed on one of the copies
    of the variable also affect the other."""
    uf = UnionFind(Representative)
    entrymap = mkentrymap(graph)
    del entrymap[graph.startblock]
    entrymap.pop(graph.returnblock, None)
    entrymap.pop(graph.exceptblock, None)
    inputs = {}
    for block, links in entrymap.items():
        phis = zip(block.inputargs, zip(*[link.args for link in links]))
        inputs[block] = phis

    def simplify_phis(block):
        phis = inputs[block]
        to_remove = []
        unique_phis = {}
        for i, (input, phi_args) in enumerate(phis):
            new_args = [uf.find_rep(arg) for arg in phi_args]
            if all_equal(new_args) and not isspecialvar(new_args[0]):
                uf.union(new_args[0], input)
                to_remove.append(i)
            else:
                t = tuple(new_args)
                if t in unique_phis:
                    uf.union(unique_phis[t], input)
                    to_remove.append(i)
                else:
                    unique_phis[t] = input
        for i in reversed(to_remove):
            del phis[i]
        return bool(to_remove)

    progress = True
    while progress:
        progress = False
        for block in inputs:
            if simplify_phis(block):
                progress = True

    renaming = dict((key, uf[key].rep) for key in uf)
    for block, links in entrymap.items():
        if inputs[block]:
            new_inputs, new_args = zip(*inputs[block])
            new_args = map(list, zip(*new_args))
        else:
            new_inputs = []
            new_args = [[] for _ in links]
        block.inputargs = new_inputs
        assert len(links) == len(new_args)
        for link, args in zip(links, new_args):
            link.args = args
    for block in graph.iterblocks():
        block.renamevariables(renaming)
Example #17
    def test_multiple_catch_simple_call(self):
        graph = self.codetest(self.multiple_catch_simple_call)
        simplify_graph(graph)
        assert self.all_operations(graph) == {'simple_call': 1}
        entrymap = mkentrymap(graph)
        links = entrymap[graph.returnblock]
        assert len(links) == 3
        assert (dict.fromkeys([link.exitcase for link in links]) ==
                dict.fromkeys([None, IndexError, OSError]))
        links = entrymap[graph.exceptblock]
        assert len(links) == 1
        assert links[0].exitcase is Exception
Example #18
def join_blocks(graph):
    """Links can be deleted if they are the single exit of a block and
    the single entry point of the next block.  When this happens, we can
    append all the operations of the following block to the preceding
    block (but renaming variables with the appropriate arguments).
    """
    entrymap = mkentrymap(graph)
    block = graph.startblock
    seen = {block: True}
    stack = list(block.exits)
    while stack:
        link = stack.pop()
        if (link.prevblock.exitswitch is None
                and len(entrymap[link.target]) == 1
                and link.target.exits):  # stop at the returnblock
            assert len(link.prevblock.exits) == 1
            renaming = {}
            for vprev, vtarg in zip(link.args, link.target.inputargs):
                renaming[vtarg] = vprev

            def rename_op(op):
                op = op.replace(renaming)
                # special case...
                if op.opname == 'indirect_call':
                    if isinstance(op.args[0], Constant):
                        assert isinstance(op.args[-1], Constant)
                        del op.args[-1]
                        op.opname = 'direct_call'
                return op

            for op in link.target.operations:
                link.prevblock.operations.append(rename_op(op))
            exits = []
            for exit in link.target.exits:
                newexit = exit.replace(renaming)
                exits.append(newexit)
            if link.target.exitswitch:
                newexitswitch = link.target.exitswitch.replace(renaming)
            else:
                newexitswitch = None
            link.prevblock.exitswitch = newexitswitch
            link.prevblock.recloseblock(*exits)
            if (isinstance(newexitswitch, Constant)
                    and not link.prevblock.canraise):
                exits = replace_exitswitch_by_constant(link.prevblock,
                                                       newexitswitch)
            stack.extend(exits)
        else:
            if link.target not in seen:
                stack.extend(link.target.exits)
                seen[link.target] = True
Example #19
def join_blocks(graph):
    """Links can be deleted if they are the single exit of a block and
    the single entry point of the next block.  When this happens, we can
    append all the operations of the following block to the preceding
    block (but renaming variables with the appropriate arguments).
    """
    entrymap = mkentrymap(graph)
    block = graph.startblock
    seen = {block: True}
    stack = list(block.exits)
    while stack:
        link = stack.pop()
        if (
            link.prevblock.exitswitch is None and len(entrymap[link.target]) == 1 and link.target.exits
        ):  # stop at the returnblock
            assert len(link.prevblock.exits) == 1
            renaming = {}
            for vprev, vtarg in zip(link.args, link.target.inputargs):
                renaming[vtarg] = vprev

            def rename(v):
                return renaming.get(v, v)

            def rename_op(op):
                op = op.replace(renaming)
                # special case...
                if op.opname == "indirect_call":
                    if isinstance(op.args[0], Constant):
                        assert isinstance(op.args[-1], Constant)
                        del op.args[-1]
                        op.opname = "direct_call"
                return op

            for op in link.target.operations:
                link.prevblock.operations.append(rename_op(op))
            exits = []
            for exit in link.target.exits:
                newexit = exit.copy(rename)
                exits.append(newexit)
            newexitswitch = rename(link.target.exitswitch)
            link.prevblock.exitswitch = newexitswitch
            link.prevblock.recloseblock(*exits)
            if isinstance(newexitswitch, Constant) and newexitswitch != c_last_exception:
                exits = replace_exitswitch_by_constant(link.prevblock, newexitswitch)
            stack.extend(exits)
        else:
            if link.target not in seen:
                stack.extend(link.target.exits)
                seen[link.target] = True
Example #20
    def test_break_from_handler(self):
        def f(x):
            while True:
                try:
                    x()
                except TypeError:
                    if x:
                        raise
                    break
        assert f(0) is None
        graph = self.codetest(f)
        simplify_graph(graph)
        entrymap = mkentrymap(graph)
        links = entrymap[graph.returnblock]
        assert len(links) == 1
Example #21
def mkinsideentrymap(graph_or_blocks):
    # graph_or_blocks can be a full FunctionGraph, or a mapping
    # {block: reachable-from-outside-flag}.
    if isinstance(graph_or_blocks, dict):
        blocks = graph_or_blocks
        entrymap = {}
        for block in blocks:
            for link in block.exits:
                if link.target in blocks and not blocks[link.target]:
                    entrymap.setdefault(link.target, []).append(link)
        return entrymap
    else:
        graph = graph_or_blocks
        entrymap = mkentrymap(graph)
        del entrymap[graph.startblock]
        return entrymap
Example #22
def constant_diffuse(graph):
    count = 0
    # after 'exitswitch vexit', replace 'vexit' with the corresponding constant
    # if it also appears on the outgoing links
    for block in graph.iterblocks():
        vexit = block.exitswitch
        if isinstance(vexit, Variable):
            for link in block.exits:
                if vexit in link.args and link.exitcase != 'default':
                    remap = {
                        vexit: Constant(link.llexitcase, vexit.concretetype)
                    }
                    link.args = [remap.get(v, v) for v in link.args]
                    count += 1
    # if the same constants appear at the same positions in all links
    # into a block, remove them from the links, remove the corresponding
    # input variables, and introduce equivalent same_as at the beginning
    # of the block; then try to fold the block further
    for block, links in mkentrymap(graph).iteritems():
        if block is graph.startblock:
            continue
        if block.exits == ():
            continue
        firstlink = links[0]
        rest = links[1:]
        diffuse = []
        for i, c in enumerate(firstlink.args):
            if not isinstance(c, Constant):
                continue
            for lnk in rest:
                if lnk.args[i] != c:
                    break
            else:
                diffuse.append((i, c))
        diffuse.reverse()
        same_as = []
        for i, c in diffuse:
            for lnk in links:
                del lnk.args[i]
            v = block.inputargs.pop(i)
            same_as.append(SpaceOperation('same_as', [c], v))
            count += 1
        block.operations = same_as + block.operations
        if same_as:
            constant_fold_block(block)
    return count
Example #23
def constant_diffuse(graph):
    count = 0
    # after 'exitswitch vexit', replace 'vexit' with the corresponding constant
    # if it also appears on the outgoing links
    for block in graph.iterblocks():
        vexit = block.exitswitch
        if isinstance(vexit, Variable):
            for link in block.exits:
                if vexit in link.args and link.exitcase != 'default':
                    remap = {vexit: Constant(link.llexitcase,
                                             vexit.concretetype)}
                    link.args = [remap.get(v, v) for v in link.args]
                    count += 1
    # if the same constants appear at the same positions in all links
    # into a block, remove them from the links, remove the corresponding
    # input variables, and introduce equivalent same_as at the beginning
    # of the block; then try to fold the block further
    for block, links in mkentrymap(graph).iteritems():
        if block is graph.startblock:
            continue
        if block.exits == ():
            continue
        firstlink = links[0]
        rest = links[1:]
        diffuse = []
        for i, c in enumerate(firstlink.args):
            if not isinstance(c, Constant):
                continue
            for lnk in rest:
                if lnk.args[i] != c:
                    break
            else:
                diffuse.append((i, c))
        diffuse.reverse()
        same_as = []
        for i, c in diffuse:
            for lnk in links:
                del lnk.args[i]
            v = block.inputargs.pop(i)
            same_as.append(SpaceOperation('same_as', [c], v))
            count += 1
        block.operations = same_as + block.operations
        if same_as:
            constant_fold_block(block)
    return count
Example #24
def remove_tail_calls_to_self(translator, graph):
    entrymap = mkentrymap(graph)
    changed = False
    for link in entrymap[graph.returnblock]:
        block = link.prevblock
        if (len(block.exits) == 1 and len(block.operations) > 0
                and block.operations[-1].opname == 'direct_call'
                and block.operations[-1].result == link.args[0]):
            print "getgraph", graph
            if graph is graph:
                _remove_tail_call(translator, graph, block)
                changed = True
    if changed:
        from rpython.translator import simplify
        checkgraph(graph)
        simplify.remove_identical_vars(graph)
        simplify.eliminate_empty_blocks(graph)
        simplify.join_blocks(graph)
Example #25
def test_find_initializing_stores():

    class A(object):
        pass
    class B(object):
        pass
    def f():
        a = A()
        b = B()
        b.a = a
        b.b = 1
    t = rtype(f, [])
    etrafo = ExceptionTransformer(t)
    graphs = etrafo.transform_completely()
    collect_analyzer = CollectAnalyzer(t)
    init_stores = find_initializing_stores(collect_analyzer, t.graphs[0],
                                           mkentrymap(t.graphs[0]))
    assert len(init_stores) == 1
Example #26
    def inline_once(self, block, index_operation):
        self.varmap = {}
        self._copied_blocks = {}
        self.op = block.operations[index_operation]
        self.graph_to_inline = self.get_graph_from_op(self.op)
        self.exception_guarded = False
        if self.op is block.raising_op:
            self.exception_guarded = True
            if self.inline_guarded_calls:
                if (not self.inline_guarded_calls_no_matter_what and
                        does_raise_directly(self.graph_to_inline, self.raise_analyzer)):
                    raise CannotInline("can't inline because the call is exception guarded")
            elif any_call_to_raising_graphs(self.graph_to_inline,
                                            self.translator, self.raise_analyzer):
                raise CannotInline("can't handle exceptions")
        self._passon_vars = {}
        self.entrymap = mkentrymap(self.graph_to_inline)
        self.do_inline(block, index_operation)
Example #27
def remove_tail_calls_to_self(translator, graph):
    entrymap = mkentrymap(graph)
    changed = False
    for link in entrymap[graph.returnblock]:
        block = link.prevblock
        if (len(block.exits) == 1 and
            len(block.operations) > 0 and
            block.operations[-1].opname == 'direct_call' and
            block.operations[-1].result == link.args[0]):
            print "getgraph", graph
            if graph is graph:
                _remove_tail_call(translator, graph, block)
                changed = True
    if changed:
        from rpython.translator import simplify
        checkgraph(graph)
        simplify.remove_identical_vars(graph)
        simplify.eliminate_empty_blocks(graph)
        simplify.join_blocks(graph)
Example #28
    def cfunction_body(self):
        graph = self.graph

        # Locate blocks with a single predecessor, which can be written
        # inline in place of a "goto":
        entrymap = mkentrymap(graph)
        self.inlinable_blocks = {
            block for block in entrymap if len(entrymap[block]) == 1}

        yield ''
        for line in self.gen_goto(graph.startblock):
            yield line

        # Only blocks left are those that have more than one predecessor.
        for block in graph.iterblocks():
            if block in self.inlinable_blocks:
                continue
            for line in self.gen_block(block):
                yield line
Example #29
def transform_ovfcheck(graph):
    """The special function calls ovfcheck needs to
    be translated into primitive operations. ovfcheck is called directly
    after an operation that should be turned into an overflow-checked
    version. It is considered a syntax error if the resulting <op>_ovf
    is not defined in objspace/flow/objspace.py.
    """
    covf = Constant(rarithmetic.ovfcheck)

    def check_syntax(opname):
        oper = getattr(operation.op, opname + "_ovf")
        exlis = oper.canraise
        if OverflowError not in exlis:
            raise Exception("ovfcheck in %s: Operation %s has no"
                            " overflow variant" % (graph.name, opname))

    for block in graph.iterblocks():
        for i in range(len(block.operations)-1, -1, -1):
            op = block.operations[i]
            if op.opname != 'simple_call':
                continue
            if op.args[0] == covf:
                if i == 0:
                    # hard case: ovfcheck() on an operation that occurs
                    # in the previous block, like 'floordiv'.  The generic
                    # exception handling around the ovfcheck() is enough
                    # to cover all cases; kill the one around the previous op.
                    entrymap = mkentrymap(graph)
                    links = entrymap[block]
                    assert len(links) == 1
                    prevblock = links[0].prevblock
                    assert prevblock.exits[0].target is block
                    prevblock.exitswitch = None
                    prevblock.exits = (links[0],)
                    join_blocks(graph)         # merge the two blocks together
                    transform_ovfcheck(graph)  # ...and try again
                    return
                op1 = block.operations[i-1]
                check_syntax(op1.opname)
                op1.opname += '_ovf'
                del block.operations[i]
                block.renamevariables({op.result: op1.result})
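
For reference, ovfcheck comes from rpython.rlib.rarithmetic and is written directly around an integer operation in RPython source; transform_ovfcheck then rewrites the preceding operation into its _ovf variant and removes the call. A minimal usage sketch (the function name is illustrative):

from rpython.rlib.rarithmetic import ovfcheck

def checked_mul(a, b):
    try:
        return ovfcheck(a * b)    # the 'mul' operation becomes 'mul_ovf'
    except OverflowError:
        return -1
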
Example #30
    def cfunction_body(self):
        if self.db.reverse_debugger:
            from rpython.translator.revdb import gencsupp
            (extra_enter_text,
             self.extra_return_text) = (gencsupp.prepare_function(self))
            if extra_enter_text:
                yield extra_enter_text
        graph = self.graph

        # ----- for gc_enter_roots_frame
        _seen = set()
        for block in graph.iterblocks():
            for op in block.operations:
                if op.opname == 'gc_enter_roots_frame':
                    _seen.add(tuple(op.args))
        if _seen:
            assert len(_seen) == 1, (
                "multiple different gc_enter_roots_frame in %r" % (graph, ))
            for line in self.gcpolicy.enter_roots_frame(self, list(_seen)[0]):
                yield line
        # ----- done

        # Locate blocks with a single predecessor, which can be written
        # inline in place of a "goto":
        entrymap = mkentrymap(graph)
        self.inlinable_blocks = {
            block
            for block in entrymap if len(entrymap[block]) == 1
        }

        yield ''
        for line in self.gen_goto(graph.startblock):
            yield line

        # Only blocks left are those that have more than one predecessor.
        for block in graph.iterblocks():
            if block in self.inlinable_blocks:
                continue
            for line in self.gen_block(block):
                yield line
Example #31
def transform_ovfcheck(graph):
    """The special function calls ovfcheck needs to
    be translated into primitive operations. ovfcheck is called directly
    after an operation that should be turned into an overflow-checked
    version. It is considered a syntax error if the resulting <op>_ovf
    is not defined in objspace/flow/objspace.py.
    """
    covf = Constant(rarithmetic.ovfcheck)

    for block in graph.iterblocks():
        for i in range(len(block.operations) - 1, -1, -1):
            op = block.operations[i]
            if op.opname != 'simple_call':
                continue
            if op.args[0] == covf:
                if i == 0:
                    # hard case: ovfcheck() on an operation that occurs
                    # in the previous block, like 'floordiv'.  The generic
                    # exception handling around the ovfcheck() is enough
                    # to cover all cases; kill the one around the previous op.
                    entrymap = mkentrymap(graph)
                    links = entrymap[block]
                    assert len(links) == 1
                    prevblock = links[0].prevblock
                    assert prevblock.exits[0].target is block
                    prevblock.exitswitch = None
                    prevblock.exits = (links[0], )
                    join_blocks(graph)  # merge the two blocks together
                    transform_ovfcheck(graph)  # ...and try again
                    return
                op1 = block.operations[i - 1]
                if not isinstance(op1, OverflowingOperation):
                    raise Exception("ovfcheck in %s: Operation %s has no "
                                    "overflow variant" %
                                    (graph.name, op1.opname))
                op1_ovf = op1.ovfchecked()
                block.operations[i - 1] = op1_ovf
                del block.operations[i]
                block.renamevariables({op.result: op1_ovf.result})
Example #32
def move_pushes_earlier(graph, regalloc):
    """gc_push_roots and gc_pop_roots are pushes/pops to the shadowstack,
    immediately enclosing the operation that needs them (typically a call).
    Here, we try to move individual pushes earlier.

    Should run after expand_push_roots(), but before expand_pop_roots(),
    so that it sees individual 'gc_save_root' operations but bulk
    'gc_pop_roots' operations.
    """
    # Concrete example (assembler tested on x86-64 gcc 5.3 and clang 3.7):
    #
    # ----original----           ----move_pushes_earlier----
    #
    # while (a > 10) {           *foo = b;
    #     *foo = b;              while (a > 10) {
    #     a = g(a);                  a = g(a);
    #     b = *foo;                  b = *foo;
    #                                // *foo = b;
    # }                          }
    # return b;                  return b;
    #
    # => the store and the       => the store is before, and gcc/clang
    # load are in the loop,      moves the load after the loop
    # even in the assembler      (the commented-out '*foo=b' is removed
    #                            here, but gcc/clang would also remove it)

    # Draft of the algorithm: see shadowcolor.txt

    if not regalloc:
        return

    entrymap = mkentrymap(graph)
    assert len(entrymap[graph.startblock]) == 1

    inputvars = {}    # {inputvar: (its block, its index in inputargs)}
    for block in graph.iterblocks():
        for i, v in enumerate(block.inputargs):
            inputvars[v] = (block, i)

    Plist = []

    for index in range(regalloc.numcolors):
        U = UnionFind()

        S = set()
        for block in graph.iterblocks():
            for op in reversed(block.operations):
                if op.opname == 'gc_pop_roots':
                    break
            else:
                continue   # no gc_pop_roots in this block
            for v in op.args:
                if isinstance(v, Variable) and regalloc.checkcolor(v, index):
                    break
            else:
                continue   # no variable goes into index i

            succ = set()
            pending_succ = [(block, v)]
            while pending_succ:
                block1, v1 = pending_succ.pop()
                assert regalloc.checkcolor(v1, index)
                for op1 in block1.operations:
                    if is_trivial_rewrite(op1) and op1.args[0] is v1:
                        if regalloc.checkcolor(op1.result, index):
                            pending_succ.append((block1, op1.result))
                for link1 in block1.exits:
                    for i2, v2 in enumerate(link1.args):
                        if v2 is not v1:
                            continue
                        block2 = link1.target
                        w2 = block2.inputargs[i2]
                        if w2 in succ or not regalloc.checkcolor(w2, index):
                            continue
                        succ.add(w2)
                        for op2 in block2.operations:
                            if op2.opname in ('gc_save_root', 'gc_pop_roots'):
                                break
                        else:
                            pending_succ.append((block2, w2))
            U.union_list(list(succ))
            S.update(succ)

        G = defaultdict(set)
        for block in graph.iterblocks():
            found = False
            for opindex, op in enumerate(block.operations):
                if op.opname == 'gc_save_root':
                    if (isinstance(op.args[1], Constant) and
                        op.args[1].concretetype == lltype.Signed):
                        break
                    elif op.args[0].value == index:
                        found = True
                        break
            if not found or not isinstance(op.args[1], Variable):
                continue   # no matching gc_save_root in this block

            key = (block, op)
            pred = set()
            pending_pred = [(block, op.args[1], opindex)]
            while pending_pred:
                block1, v1, opindex1 = pending_pred.pop()
                assert regalloc.getcolor(v1) == index
                for i in range(opindex1-1, -1, -1):
                    op1 = block1.operations[i]
                    if op1.opname == 'gc_pop_roots':
                        break    # stop
                    if op1.result is v1:
                        if not is_trivial_rewrite(op1):
                            break   # stop
                        if not regalloc.checkcolor(op1.args[0], index):
                            break   # stop
                        v1 = op1.args[0]
                else:
                    varindex = block1.inputargs.index(v1)
                    if v1 in pred:
                        continue    # already done
                    pred.add(v1)
                    for link1 in entrymap[block1]:
                        prevblock1 = link1.prevblock
                        if prevblock1 is not None:
                            w1 = link1.args[varindex]
                            if isinstance(w1, Variable) and w1 not in pred:
                                if regalloc.checkcolor(w1, index):
                                    pending_pred.append((prevblock1, w1,
                                                len(prevblock1.operations)))
            U.union_list(list(pred))
            for v1 in pred:
                G[v1].add(key)

        M = S.intersection(G)

        parts_target = {}
        for v in M:
            vp = U.find_rep(v)
            if vp not in parts_target:
                new_part = (index, set(), set())
                # (index,
                #  subset P of variables,
                #  set of (block, gc_save_root))
                Plist.append(new_part)
                parts_target[vp] = new_part
            part = parts_target[vp]
            part[1].add(v)
            part[2].update(G[v])

    # Sort P so that it prefers places that would avoid multiple
    # gcsaveroots (smaller 'heuristic' result, so first in sorted
    # order); but also prefers smaller overall pieces, because it
    # might be possible to remove several small-scale pieces instead
    # of one big-scale one.
    def heuristic((index, P, gcsaveroots)):
        return float(len(P)) / len(gcsaveroots)
    Plist.sort(key=heuristic)

    variables_along_changes = {}
    live_at_start_of_block = set()   # set of (block, index)
    insert_gc_push_root = defaultdict(list)

    for index, P, gcsaveroots in Plist:
        # if this Plist entry is not valid any more because of changes
        # done by the previous entries, drop it
        if any((inputvars[v][0], index) in live_at_start_of_block for v in P):
            continue
        if any(op not in block.operations for block, op in gcsaveroots):
            continue
        for v in P:
            assert regalloc.getcolor(v) == index
            assert v not in variables_along_changes

        success_count = 0
        mark = []

        for v in P:
            block, varindex = inputvars[v]
            for link in entrymap[block]:
                w = link.args[varindex]
                if link.prevblock is not None:
                    prevoperations = link.prevblock.operations
                else:
                    prevoperations = []
                for op in reversed(prevoperations):
                    if op.opname == 'gc_pop_roots':
                        # it is possible to have gc_pop_roots() without
                        # w in the args, if w is the result of the call
                        # that comes just before.
                        if (isinstance(w, Variable) and
                                w in op.args and
                                regalloc.checkcolor(w, index)):
                            success_count += 1
                        else:
                            mark.append((index, link, varindex))
                        break
                    if op.result is w:
                        if is_trivial_rewrite(op) and (
                                regalloc.checkcolor(op.args[0], index)):
                            w = op.args[0]
                        else:
                            mark.append((index, link, varindex))
                            break
                else:
                    if not isinstance(w, Variable) or w not in P:
                        mark.append((index, link, varindex))

        if success_count > 0:
            for block, op in gcsaveroots:
                newops = list(block.operations)
                newops.remove(op)
                block.operations = newops
            for index, link, varindex in mark:
                insert_gc_push_root[link].append((index, link.args[varindex]))
            for v in P:
                block, varindex = inputvars[v]
                variables_along_changes[v] = block, index
                live_at_start_of_block.add((block, index))

    for link in insert_gc_push_root:
        newops = [_gc_save_root(index, v)
                  for index, v in sorted(insert_gc_push_root[link])]
        insert_empty_block(link, newops=newops)
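
A minimal standalone sketch (hypothetical data, not part of the pass above) of the
ordering produced by heuristic(): smaller len(P)/len(gcsaveroots) sorts first, so a
piece that removes many gc_save_root operations per tracked variable is tried first.

# assumption: plain Python stand-ins for the (index, P, gcsaveroots) tuples above
demo_Plist = [
    (0, set(['v1', 'v2', 'v3']), set(['save_a'])),            # 3 vars / 1 save  -> 3.0
    (0, set(['v4']), set(['save_b', 'save_c'])),              # 1 var  / 2 saves -> 0.5
]
demo_Plist.sort(key=lambda entry: float(len(entry[1])) / len(entry[2]))
print([sorted(p[1]) for p in demo_Plist])    # [['v4'], ['v1', 'v2', 'v3']]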
Example #38
0
def add_enter_leave_roots_frame(graph, regalloc, c_gcdata):
    # put 'gc_enter_roots_frame' as late as possible, but before the
    # first 'gc_save_root' is reached.
    #
    # put the 'gc_leave_roots_frame' operations as early as possible,
    # that is, just after the last 'gc_restore_root' is reached.  This is
    # done by putting it along a link, such that the previous block
    # contains a 'gc_restore_root' and from the next block it is not
    # possible to reach any extra 'gc_restore_root'; then, as doing
    # this is not as precise as we'd like, we first break every block
    # just after its last 'gc_restore_root'.
    if regalloc is None:
        return

    # break blocks after their last 'gc_restore_root', unless they
    # are already at the last position
    for block in graph.iterblocks():
        ops = block.operations
        for i in range(len(ops)-1, -1, -1):
            if ops[i].opname == 'gc_restore_root':
                if i < len(ops) - 1:
                    split_block(block, i + 1)
                break
    # done

    insert_empty_startblock(graph)
    entrymap = mkentrymap(graph)

    # helpers

    def is_interesting_op(op):
        if op.opname == 'gc_restore_root':
            return True
        if op.opname == 'gc_save_root':
            # ignore saves that say "everything is free"
            return not (isinstance(op.args[1], Constant) and
                        isinstance(op.args[1].value, int) and
                        op.args[1].value == bitmask_all_free)
        return False
    bitmask_all_free = (1 << regalloc.numcolors) - 1

    def insert_along_link(link, opname, args, cache):
        b2 = link.target
        if b2 not in cache:
            newblock = Block([v.copy() for v in b2.inputargs])
            newblock.operations.append(
                SpaceOperation(opname, args, varoftype(lltype.Void)))
            newblock.closeblock(Link(list(newblock.inputargs), b2))
            cache[b2] = newblock
        link.target = cache[b2]

    # make a list of blocks with gc_save_root/gc_restore_root in them
    interesting_blocks = []
    for block in graph.iterblocks():
        for op in block.operations:
            if is_interesting_op(op):
                assert block is not graph.startblock
                assert block is not graph.returnblock
                interesting_blocks.append(block)
                break    # stop scanning this block, go to the next one

    # compute the blocks such that 'gc_save_root/gc_restore_root'
    # exist anywhere before the start of this block
    before_blocks = set()
    pending = list(interesting_blocks)
    seen = set(pending)
    while pending:
        block = pending.pop()
        for link in block.exits:
            before_blocks.add(link.target)
            if link.target not in seen:
                seen.add(link.target)
                pending.append(link.target)
    assert graph.startblock not in before_blocks

    # compute the blocks such that 'gc_save_root/gc_restore_root'
    # exist anywhere after the start of this block
    after_blocks = set(interesting_blocks)
    pending = list(interesting_blocks)
    while pending:
        block = pending.pop()
        for link in entrymap[block]:
            if link.prevblock is not None:
                if link.prevblock not in after_blocks:
                    after_blocks.add(link.prevblock)
                    pending.append(link.prevblock)
    assert graph.returnblock not in after_blocks

    # this is the set of blocks such that, at the start of the block,
    # we're "in frame", i.e. there are 'gc_save_root/gc_restore_root'
    # both before and after the start of the block.
    inside_blocks = before_blocks & after_blocks
    inside_or_interesting_blocks = set(interesting_blocks) | inside_blocks

    # if a block contains gc_save_root/gc_restore_root but is not
    # an "inside_block", then add gc_enter_roots_frame where needed
    c_num = Constant(regalloc.numcolors, lltype.Signed)
    for block in interesting_blocks:
        if block not in inside_blocks:
            i = 0
            while not is_interesting_op(block.operations[i]):
                i += 1
            block.operations.insert(i,
                SpaceOperation('gc_enter_roots_frame', [c_gcdata, c_num],
                               varoftype(lltype.Void)))

    # If a link goes from a "non-inside, non-interesting block"
    # straight to an "inside_block", insert a gc_enter_roots_frame
    # along the link.  Similarly, if a block is a "inside-or-
    # interesting_block" and exits with a link going to a
    # "non-inside_block", then insert a gc_leave_roots_frame along the
    # link.
    cache1 = {}
    cache2 = {}
    for block in list(graph.iterblocks()):
        if block not in inside_or_interesting_blocks:
            for link in block.exits:
                if link.target in inside_blocks:
                    insert_along_link(link, 'gc_enter_roots_frame',
                                      [c_gcdata, c_num], cache1)
        else:
            for link in block.exits:
                if link.target not in inside_blocks:
                    insert_along_link(link, 'gc_leave_roots_frame',
                                      [], cache2)

    # check all blocks not in "inside_block": they might contain a
    # gc_save_root() that writes the bitmask meaning "everything is
    # free".  Look only before gc_enter_roots_frame, if there is one
    # in that block.  Remove these out-of-frame gc_save_root().
    for block in graph.iterblocks():
        if block not in inside_blocks:
            newops = []
            for i, op in enumerate(block.operations):
                if op.opname == 'gc_enter_roots_frame':
                    newops.extend(block.operations[i:])
                    break
                if op.opname == 'gc_save_root' and not is_interesting_op(op):
                    pass   # don't add in newops
                else:
                    newops.append(op)
            if len(newops) < len(block.operations):
                block.operations = newops

    join_blocks(graph)  # for the extra new blocks made in this function
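
A minimal standalone sketch (toy successor map, hypothetical block names; not the
flow-graph objects used above) of how inside_blocks is computed: a block is "inside
the frame" when an interesting block lies both somewhere before and somewhere after it.

succs = {'start': ['a'], 'a': ['b'], 'b': ['c'], 'c': ['exit'], 'exit': []}
preds = {}
for blk, outs in succs.items():
    for nxt in outs:
        preds.setdefault(nxt, []).append(blk)
interesting = set(['a', 'c'])    # blocks containing gc_save_root / gc_restore_root

def reach(starts, edges):
    # plain reachability, mirroring the two pending-list loops above
    seen, pending = set(), list(starts)
    while pending:
        blk = pending.pop()
        for nxt in edges.get(blk, []):
            if nxt not in seen:
                seen.add(nxt)
                pending.append(nxt)
    return seen

before_blocks = reach(interesting, succs)                # interesting block before them
after_blocks = interesting | reach(interesting, preds)   # interesting block at or after them
inside_blocks = before_blocks & after_blocks
print(sorted(inside_blocks))    # ['b', 'c'] -- the roots frame must stay open across them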
Example #39
0
def partial_escape(translator, graph):
    """
    Main function.
    Blocks to be worked on are kept in a deque called "worklist"; each block
    indexes a list of (link, state) tuples in "statemap".
    """
    insert_links(graph)
    worklist = deque([graph.startblock])
    statemap = defaultdict(list)
    statemap[graph.startblock] = [(None, {})]
    finished = set()
    entrymap = mkentrymap(graph)
    backedges = find_backedges(graph)

    number_getfield_removed = 0

    while worklist:
        block = worklist.popleft()
        must_be_materialized = block.is_final_block()
        for link in entrymap[block]:
            if link in backedges:
                must_be_materialized = True
        state = get_current_state(statemap[block],
                                  must_be_materialized=must_be_materialized)
        if block.is_final_block():
            continue

        new_operations = []
        # Going through the operations
        for op in block.operations:
            if op.opname == 'malloc':
                # Create new entry for every allocation that is not returned
                if can_remove(op):
                    vobj = VirtualObject(op.result.concretetype, op.args)
                    state[op.result] = vobj
                    vobj.aliases.add(op.result)
                else:
                    new_operations.append(op)
            elif op.opname == 'cast_pointer':
                if op.args[0] in state:
                    # Creating something like an 'alias' for the casting
                    state[op.result] = vobj = state[op.args[0]]
                    vobj.aliases.add(op.result)
                else:
                    new_operations.append(op)
            elif op.opname == 'setfield':
                if op.args[0] in state:
                    state[op.args[0]].vars[op.args[1].value,
                                           op.args[0].concretetype] = op.args[2]
                else:
                    materialize_object(op.args[2], state, new_operations)
                    new_operations.append(op)
            elif op.opname == 'getfield':
                key = op.args[1].value, op.args[0].concretetype
                if op.args[0] in state and key in state[op.args[0]].vars:
                    targ = state[op.args[0]].vars[key]
                    number_getfield_removed += 1
                    if targ in state:
                        state[op.result] = vobj = state[targ]
                        vobj.aliases.add(op.result)
                    else:
                        new_operations.append(SpaceOperation('same_as',
                                                             [targ],
                                                             op.result))
                else:
                    materialize_object(op.args[0], state, new_operations)
                    new_operations.append(op)
            else:
                for arg in op.args:
                    materialize_object(arg, state, new_operations)
                new_operations.append(op)
        # for all backedges, materialize all arguments (loops aren't supported
        # properly yet)
        for exit in block.exits:
            if exit in backedges or exit.target.is_final_block():
                for arg in exit.args:
                    materialize_object(arg, state, new_operations)
        block.operations = new_operations

        # We're done with the internals of the block. Editing the lists:
        finished.add(block)
        for exit in block.exits:
            # Only adding to the worklist if all its ancestors are processed
            for lnk in entrymap[exit.target]:
                if lnk.prevblock not in finished and lnk not in backedges:
                    break
            else:
                if exit.target not in finished and exit.target not in worklist: # XXX
                    worklist.append(exit.target)
            # setting statemaps:
            statemap[exit.target].append((exit, state))
    if number_getfield_removed:
        if translator.config.translation.verbose:
            log.cse("partial escape analysis removed %s getfields in graph %s"
                    % (number_getfield_removed, graph))
        else:
            log.dot()

    # Done. Cleaning up.
    remove_same_as(graph)
    transform_dead_op_vars(graph)
    eliminate_empty_blocks(graph)
    join_blocks(graph)
    checkgraph(graph)

    return number_getfield_removed
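
A minimal sketch (hypothetical variable and field names) of the per-block bookkeeping
the pass above relies on: a not-yet-materialized allocation is tracked as a plain
mapping of its fields, so a later getfield is answered from that mapping instead of
being emitted.

state = {}
# malloc + setfield on a non-escaping object are folded into the mapping
state['v_obj'] = {('inst_x', 'SOMESTRUCT'): 'v_value'}
key = ('inst_x', 'SOMESTRUCT')
if 'v_obj' in state and key in state['v_obj']:
    v_result = state['v_obj'][key]    # the getfield becomes a same_as of v_value
print(v_result)                       # 'v_value'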
Example #40
0
def SSA_to_SSI(graph, annotator=None):
    """Turn a number of blocks belonging to a flow graph into valid (i.e. SSI)
    form, assuming that they are only in SSA form (i.e. they can use each
    other's variables directly, without having to pass and rename them along
    links).
    """
    entrymap = mkentrymap(graph)
    del entrymap[graph.startblock]
    builder = DataFlowFamilyBuilder(graph)
    variable_families = builder.get_variable_families()
    del builder

    pending = []  # list of (block, var-used-but-not-defined)
    for block in graph.iterblocks():
        if block not in entrymap:
            continue
        variables_created = variables_created_in(block)
        seen = set(variables_created)
        variables_used = []

        def record_used_var(v):
            if v not in seen:
                variables_used.append(v)
                seen.add(v)

        for op in block.operations:
            for arg in op.args:
                record_used_var(arg)
        record_used_var(block.exitswitch)
        for link in block.exits:
            for arg in link.args:
                record_used_var(arg)

        for v in variables_used:
            if (isinstance(v, Variable) and
                    v._name not in ("last_exception_", "last_exc_value_")):
                pending.append((block, v))

    while pending:
        block, v = pending.pop()
        v_rep = variable_families.find_rep(v)
        variables_created = variables_created_in(block)
        if v in variables_created:
            continue  # already ok
        for w in variables_created:
            w_rep = variable_families.find_rep(w)
            if v_rep is w_rep:
                # 'w' is in the same family as 'v', so we can reuse it
                block.renamevariables({v: w})
                break
        else:
            # didn't find it.  Add it to all incoming links.
            try:
                links = entrymap[block]
            except KeyError:
                raise Exception("SSA_to_SSI failed: no way to give a value to"
                                " %r in %r" % (v, block))
            w = v.copy()
            variable_families.union(v, w)
            block.renamevariables({v: w})
            block.inputargs.append(w)
            for link in links:
                link.args.append(v)
                pending.append((link.prevblock, v))
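
A minimal sketch (toy data, hypothetical names) of the repair step above: a variable
defined only in a predecessor block A but used in block B is renamed inside B to a
fresh copy, appended to B's inputargs, and passed explicitly along the link A -> B.

#   SSA:  A: v = op()        B:       use(v)      # B borrows A's variable
#   SSI:  A: v = op()        B(v_1):  use(v_1)    # the link A -> B carries v
block_B_inputargs = []
link_A_to_B_args = []
v, w = 'v', 'v_1'                  # w plays the role of v.copy()
block_B_inputargs.append(w)        # B now receives the value as an input argument
link_A_to_B_args.append(v)         # the link A -> B passes v into it
print(block_B_inputargs, link_A_to_B_args)    # ['v_1'] ['v']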