Example #1
def constant_diffuse(graph):
    # if the same constants appear at the same positions in all links
    # into a block, remove them from the links, remove the corresponding
    # input variables, and introduce equivalent same_as operations at the
    # beginning of the block; then try to fold the block further
    count = 0
    for block, links in mkentrymap(graph).iteritems():
        if block is graph.startblock:
            continue
        if block.exits == ():
            continue
        firstlink = links[0]
        rest = links[1:]
        diffuse = []
        for i, c in enumerate(firstlink.args):
            if not isinstance(c, Constant):
                continue
            for lnk in rest:
                if lnk.args[i] != c:
                    break
            else:
                diffuse.append((i, c))
        diffuse.reverse()
        same_as = []
        for i, c in diffuse:
            for lnk in links:
                del lnk.args[i]
            v = block.inputargs.pop(i)
            same_as.append(SpaceOperation('same_as', [c], v))
            count += 1
        block.operations = same_as + block.operations
        if same_as:
            constant_fold_block(block)
    return count
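All of these examples use mkentrymap from pypy.objspace.flow.model, which inverts the control-flow graph: it returns a dict mapping each block to the list of links that enter it. Below is a minimal sketch of that computation (not the PyPy source itself; it assumes the Link constructor and the graph/block attributes used in the examples on this page):

# minimal sketch of what mkentrymap() computes: {block: [entry links]}.
# The start block gets a pseudo-link whose prevblock is None, which is why
# Example #5 filters such links out and Example #20 deletes the startblock entry.
from pypy.objspace.flow.model import Link

def mkentrymap_sketch(graph):
    startlink = Link(graph.getargs(), graph.startblock)   # prevblock stays None
    entrymap = {graph.startblock: [startlink]}
    for block in graph.iterblocks():
        for link in block.exits:
            entrymap.setdefault(link.target, []).append(link)
    return entrymap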
Example #2
    def create_exception_handling(self, graph):
        """After an exception in a direct_call (or indirect_call), that is not caught
        by an explicit
        except statement, we need to reraise the exception. So after this
        direct_call we need to test if an exception had occurred. If so, we return
        from the current graph with a special value (False/-1/-1.0/null).
        Because of the added exitswitch we need an additional block.
        """
        if hasattr(graph, 'exceptiontransformed'):
            assert self.same_obj(self.exc_data_ptr, graph.exceptiontransformed)
            return
        else:
            self.raise_analyzer.analyze_direct_call(graph)
            graph.exceptiontransformed = self.exc_data_ptr

        join_blocks(graph)
        # collect the blocks before changing them
        n_need_exc_matching_blocks = 0
        n_gen_exc_checks = 0
        #
        entrymap = mkentrymap(graph)
        if graph.exceptblock in entrymap:
            for link in entrymap[graph.exceptblock]:
                self.transform_jump_to_except_block(graph, entrymap, link)
        #
        for block in list(graph.iterblocks()):
            self.replace_fetch_restore_operations(block)
            need_exc_matching, gen_exc_checks = self.transform_block(
                graph, block)
            n_need_exc_matching_blocks += need_exc_matching
            n_gen_exc_checks += gen_exc_checks
        cleanup_graph(graph)
        return n_need_exc_matching_blocks, n_gen_exc_checks
Example #3
    def create_exception_handling(self, graph, always_exc_clear=False):
        """After an exception in a direct_call (or indirect_call), that is not caught
        by an explicit
        except statement, we need to reraise the exception. So after this
        direct_call we need to test if an exception had occurred. If so, we return
        from the current graph with a special value (False/-1/-1.0/null).
        Because of the added exitswitch we need an additional block.
        """
        if hasattr(graph, 'exceptiontransformed'):
            assert self.same_obj(self.exc_data_ptr, graph.exceptiontransformed)
            return
        else:
            self.raise_analyzer.analyze_direct_call(graph)
            graph.exceptiontransformed = self.exc_data_ptr

        self.always_exc_clear = always_exc_clear
        join_blocks(graph)
        # collect the blocks before changing them
        n_need_exc_matching_blocks = 0
        n_gen_exc_checks           = 0
        #
        entrymap = mkentrymap(graph)
        if graph.exceptblock in entrymap:
            for link in entrymap[graph.exceptblock]:
                self.transform_jump_to_except_block(graph, entrymap, link)
        #
        for block in list(graph.iterblocks()):
            self.replace_stack_unwind(block)
            self.replace_fetch_restore_operations(block)
            need_exc_matching, gen_exc_checks = self.transform_block(graph, block)
            n_need_exc_matching_blocks += need_exc_matching
            n_gen_exc_checks           += gen_exc_checks
        cleanup_graph(graph)
        removenoops.remove_superfluous_keep_alive(graph)
        return n_need_exc_matching_blocks, n_gen_exc_checks
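create_exception_handling() is a method of PyPy's exception transformer. A hypothetical driver that applies it to every graph of a translator (the ExceptionTransformer constructor signature and the translator.graphs attribute are assumptions, not shown on this page) might look like:

# hypothetical driver; assumes ExceptionTransformer(translator) and
# translator.graphs, which do not appear in the examples above
from pypy.translator.exceptiontransform import ExceptionTransformer

def exception_transform_all(translator):
    etrafo = ExceptionTransformer(translator)
    for graph in translator.graphs:
        etrafo.create_exception_handling(graph)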
Example #4
    def test_keep_all_keepalives(self):
        SIZE = llmemory.sizeof(lltype.Signed)
        PARRAY = lltype.Ptr(lltype.FixedSizeArray(lltype.Signed, 1))
        class A:
            def __init__(self):
                self.addr = llmemory.raw_malloc(SIZE)
            def __del__(self):
                llmemory.raw_free(self.addr)
        class B:
            pass
        def myfunc():
            b = B()
            b.keep = A()
            b.data = llmemory.cast_adr_to_ptr(b.keep.addr, PARRAY)
            b.data[0] = 42
            ptr = b.data
            # normally 'b' could go away as early as here, which would free
            # the memory held by the instance of A in b.keep...
            res = ptr[0]
            # ...so we explicitly keep 'b' alive until here
            objectmodel.keepalive_until_here(b)
            return res
        graph = self.check(myfunc, [], [], 42,
                           expected_mallocs=1,    # 'A' instance left
                           expected_calls=1)      # to A.__init__()

        # there is a getarrayitem near the end of the graph of myfunc.
        # However, the memory it accesses must still be protected by the
        # following keepalive, even after malloc removal
        entrymap = mkentrymap(graph)
        [link] = entrymap[graph.returnblock]
        assert link.prevblock.operations[-1].opname == 'keepalive'
Example #5
    def get_phi_data(self, block):
        data = []

        entrylinks = mkentrymap(self.graph)[block]
        entrylinks = [x for x in entrylinks if x.prevblock is not None]

        inputargs = self.db.repr_arg_multi(block.inputargs)
        inputargtypes = self.db.repr_arg_type_multi(block.inputargs)

        # for each argument of the block, return a 4-tuple of
        # (arg_name, arg_type, [names from the previous blocks],
        #  [names of the corresponding previous blocks])
        for ii, (arg, type_) in enumerate(zip(inputargs, inputargtypes)):

            names = self.db.repr_arg_multi(
                [link.args[ii] for link in entrylinks])

            blocknames = [
                self.block_to_name[link.prevblock] for link in entrylinks
            ]

            assert len(names) == len(blocknames)
            data.append((arg, type_, names, blocknames))

        return data
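The 4-tuples collected by get_phi_data() carry exactly the information an LLVM 'phi' instruction needs. A hypothetical renderer (the formatting is illustrative, not the actual genllvm emitter) could be:

# hypothetical rendering of get_phi_data() output into LLVM-style phi lines;
# the exact syntax produced by the real backend may differ
def render_phi_lines(phi_data):
    lines = []
    for arg, type_, names, blocknames in phi_data:
        incoming = ", ".join("[%s, %%%s]" % (name, blockname)
                             for name, blockname in zip(names, blocknames))
        lines.append("%s = phi %s %s" % (arg, type_, incoming))
    return lines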
Example #6
    def test_keep_all_keepalives(self):
        SIZE = llmemory.sizeof(lltype.Signed)
        PARRAY = lltype.Ptr(lltype.FixedSizeArray(lltype.Signed, 1))
        class A:
            def __init__(self):
                self.addr = llmemory.raw_malloc(SIZE)
            def __del__(self):
                llmemory.raw_free(self.addr)
        class B:
            pass
        def myfunc():
            b = B()
            b.keep = A()
            b.data = llmemory.cast_adr_to_ptr(b.keep.addr, PARRAY)
            b.data[0] = 42
            ptr = b.data
            # normally 'b' could go away as early as here, which would free
            # the memory held by the instance of A in b.keep...
            res = ptr[0]
            # ...so we explicitly keep 'b' alive until here
            objectmodel.keepalive_until_here(b)
            return res
        graph = self.check(myfunc, [], [], 42,
                           must_be_removed=False)    # 'A' instance left

        # there is a getarrayitem near the end of the graph of myfunc.
        # However, the memory it accesses must still be protected by the
        # following keepalive, even after malloc removal
        entrymap = mkentrymap(graph)
        [link] = entrymap[graph.returnblock]
        assert link.prevblock.operations[-1].opname == 'keepalive'
Example #7
def find_initializing_stores(collect_analyzer, graph):
    from pypy.objspace.flow.model import mkentrymap
    entrymap = mkentrymap(graph)
    # a bit of a hackish analysis: if a block contains a malloc and a check
    # that the result is not zero, then the block following the True link
    # will usually initialize the newly allocated object
    result = {}
    def find_in_block(block, mallocvars):
        for i, op in enumerate(block.operations):
            if op.opname in ("cast_pointer", "same_as"):
                if op.args[0] in mallocvars:
                    mallocvars[op.result] = True
            elif op.opname in ("setfield", "setarrayitem", "setinteriorfield"):
                TYPE = op.args[-1].concretetype
                if (op.args[0] in mallocvars and
                    isinstance(TYPE, lltype.Ptr) and
                    TYPE.TO._gckind == "gc"):
                    result[op] = True
            else:
                if collect_analyzer.analyze(op):
                    return
        for exit in block.exits:
            if len(entrymap[exit.target]) != 1:
                continue
            newmallocvars = {}
            for i, var in enumerate(exit.args):
                if var in mallocvars:
                    newmallocvars[exit.target.inputargs[i]] = True
            if newmallocvars:
                find_in_block(exit.target, newmallocvars)
    mallocnum = 0
    blockset = set(graph.iterblocks())
    while blockset:
        block = blockset.pop()
        if len(block.operations) < 2:
            continue
        mallocop = block.operations[-2]
        checkop = block.operations[-1]
        if not (mallocop.opname == "malloc" and
                checkop.opname == "ptr_nonzero" and
                mallocop.result is checkop.args[0] and
                block.exitswitch is checkop.result):
            continue
        exits = [exit for exit in block.exits if exit.llexitcase]
        if len(exits) != 1:
            continue
        exit = exits[0]
        if len(entrymap[exit.target]) != 1:
            continue
        try:
            index = exit.args.index(mallocop.result)
        except ValueError:
            continue
        target = exit.target
        mallocvars = {target.inputargs[index]: True}
        mallocnum += 1
        find_in_block(target, mallocvars)
    #if result:
    #    print "found %s initializing stores in %s" % (len(result), graph.name)
    return result
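The returned dict is keyed by the store operations that are known to initialize freshly allocated memory. A hedged sketch of how a GC transformer could consume it (emit_write_barrier is a hypothetical helper, not part of this page):

# hypothetical consumer: only emit the GC write barrier for pointer stores
# that are not initializing stores; emit_write_barrier() is illustrative
def insert_write_barriers(collect_analyzer, graph, emit_write_barrier):
    initializing = find_initializing_stores(collect_analyzer, graph)
    for block in graph.iterblocks():
        for op in block.operations:
            if (op.opname in ("setfield", "setarrayitem", "setinteriorfield")
                    and op not in initializing):
                emit_write_barrier(op)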
Example #8
def find_initializing_stores(collect_analyzer, graph):
    from pypy.objspace.flow.model import mkentrymap
    entrymap = mkentrymap(graph)
    # a bit of a hackish analysis: if a block contains a malloc and a check
    # that the result is not zero, then the block following the True link
    # will usually initialize the newly allocated object
    result = {}
    def find_in_block(block, mallocvars):
        for i, op in enumerate(block.operations):
            if op.opname in ("cast_pointer", "same_as"):
                if op.args[0] in mallocvars:
                    mallocvars[op.result] = True
            elif op.opname in ("setfield", "setarrayitem", "setinteriorfield"):
                TYPE = op.args[-1].concretetype
                if (op.args[0] in mallocvars and
                    isinstance(TYPE, lltype.Ptr) and
                    TYPE.TO._gckind == "gc"):
                    result[op] = True
            else:
                if collect_analyzer.analyze(op):
                    return
        for exit in block.exits:
            if len(entrymap[exit.target]) != 1:
                continue
            newmallocvars = {}
            for i, var in enumerate(exit.args):
                if var in mallocvars:
                    newmallocvars[exit.target.inputargs[i]] = True
            if newmallocvars:
                find_in_block(exit.target, newmallocvars)
    mallocnum = 0
    blockset = set(graph.iterblocks())
    while blockset:
        block = blockset.pop()
        if len(block.operations) < 2:
            continue
        mallocop = block.operations[-2]
        checkop = block.operations[-1]
        if not (mallocop.opname == "malloc" and
                checkop.opname == "ptr_nonzero" and
                mallocop.result is checkop.args[0] and
                block.exitswitch is checkop.result):
            continue
        exits = [exit for exit in block.exits if exit.llexitcase]
        if len(exits) != 1:
            continue
        exit = exits[0]
        if len(entrymap[exit.target]) != 1:
            continue
        try:
            index = exit.args.index(mallocop.result)
        except ValueError:
            continue
        target = exit.target
        mallocvars = {target.inputargs[index]: True}
        mallocnum += 1
        find_in_block(target, mallocvars)
    #if result:
    #    print "found %s initializing stores in %s" % (len(result), graph.name)
    return result
Example #9
def remove_identical_vars(graph):
    """When the same variable is passed multiple times into the next block,
    pass it only once.  This enables further optimizations by the annotator,
    which otherwise doesn't realize that tests performed on one of the copies
    of the variable also affect the other."""

    # This algorithm is based on DataFlowFamilyBuilder, used as a
    # "phi node remover" (in the SSA sense).  'variable_families' is a
    # UnionFind object that groups variables by families; variables from the
    # same family can be identified, and if two input arguments of a block
    # end up in the same family, then we really remove one of them in favor
    # of the other.
    #
    # The idea is to identify as many variables as possible by trying
    # iteratively two kinds of phi node removal:
    #
    #  * "vertical", by identifying variables from different blocks, when
    #    we see that a value just flows unmodified into the next block without
    #    needing any merge (this is what backendopt.ssa.SSI_to_SSA() would do
    #    as well);
    #
    #  * "horizontal", by identifying two input variables of the same block,
    #    when these two variables' phi nodes have the same argument -- i.e.
    #    when for all possible incoming paths they would get twice the same
    #    value (this is really the purpose of remove_identical_vars()).
    #
    from pypy.translator.backendopt.ssa import DataFlowFamilyBuilder

    builder = DataFlowFamilyBuilder(graph)
    variable_families = builder.get_variable_families()  # vertical removal
    while True:
        if not builder.merge_identical_phi_nodes():  # horizontal removal
            break
        if not builder.complete():  # vertical removal
            break

    for block, links in mkentrymap(graph).items():
        if block is graph.startblock:
            continue
        renaming = {}
        family2blockvar = {}
        kills = []
        for i, v in enumerate(block.inputargs):
            v1 = variable_families.find_rep(v)
            if v1 in family2blockvar:
                # already seen -- this variable can be shared with the
                # previous one
                renaming[v] = family2blockvar[v1]
                kills.append(i)
            else:
                family2blockvar[v1] = v
        if renaming:
            block.renamevariables(renaming)
            # remove the now-duplicate input variables
            kills.reverse()  # starting from the end
            for i in kills:
                del block.inputargs[i]
                for link in links:
                    del link.args[i]
Example #10
def remove_identical_vars(graph):
    """When the same variable is passed multiple times into the next block,
    pass it only once.  This enables further optimizations by the annotator,
    which otherwise doesn't realize that tests performed on one of the copies
    of the variable also affect the other."""

    # This algorithm is based on DataFlowFamilyBuilder, used as a
    # "phi node remover" (in the SSA sense).  'variable_families' is a
    # UnionFind object that groups variables by families; variables from the
    # same family can be identified, and if two input arguments of a block
    # end up in the same family, then we really remove one of them in favor
    # of the other.
    #
    # The idea is to identify as many variables as possible by trying
    # iteratively two kinds of phi node removal:
    #
    #  * "vertical", by identifying variables from different blocks, when
    #    we see that a value just flows unmodified into the next block without
    #    needing any merge (this is what backendopt.ssa.SSI_to_SSA() would do
    #    as well);
    #
    #  * "horizontal", by identifying two input variables of the same block,
    #    when these two variables' phi nodes have the same argument -- i.e.
    #    when for all possible incoming paths they would get twice the same
    #    value (this is really the purpose of remove_identical_vars()).
    #
    if simplify_disabled(graph): return
    from pypy.translator.backendopt.ssa import DataFlowFamilyBuilder
    builder = DataFlowFamilyBuilder(graph)
    variable_families = builder.get_variable_families()  # vertical removal
    while True:
        if not builder.merge_identical_phi_nodes():  # horizontal removal
            break
        if not builder.complete():  # vertical removal
            break

    for block, links in mkentrymap(graph).items():
        if block is graph.startblock:
            continue
        renaming = {}
        family2blockvar = {}
        kills = []
        for i, v in enumerate(block.inputargs):
            v1 = variable_families.find_rep(v)
            if v1 in family2blockvar:
                # already seen -- this variable can be shared with the
                # previous one
                renaming[v] = family2blockvar[v1]
                kills.append(i)
            else:
                family2blockvar[v1] = v
        if renaming:
            block.renamevariables(renaming)
            # remove the now-duplicate input variables
            kills.reverse()  # starting from the end
            for i in kills:
                del block.inputargs[i]
                for link in links:
                    del link.args[i]
Example #11
 def __init__(self, graph):
     self.graph = graph
     self.startblock = graph.startblock
     self.returnblock = graph.returnblock
     self.vartonode = {}
     self.link_to_conditions = {}
     self.entrymap = flowmodel.mkentrymap(graph)
     self.seenblocks = set()
Example #12
def merge_if_blocks_once(graph):
    """Convert consecutive blocks that all compare a variable (of Primitive type)
    with a constant into one block with multiple exits. The backends can in
    turn output this block as a switch statement.
    """
    candidates = [block for block in graph.iterblocks()
                      if is_chain_block(block, first=True)]
    entrymap = mkentrymap(graph)
    for firstblock in candidates:
        chain = []
        checkvars = []
        varmap = {}  # {var in a block in the chain: var in the first block}
        for var in firstblock.exits[0].args:
            varmap[var] = var
        for var in firstblock.exits[1].args:
            varmap[var] = var
        def add_to_varmap(var, newvar):
            if isinstance(var, Variable):
                varmap[newvar] = varmap[var]
            else:
                varmap[newvar] = var
        current = firstblock
        while 1:
            # check whether the chain can be extended with the block that follows the
            # False link
            checkvar = [var for var in current.operations[-1].args
                           if isinstance(var, Variable)][0]
            case = [var for var in current.operations[-1].args
                       if isinstance(var, Constant)][0]
            chain.append((current, case))
            checkvars.append(checkvar)
            falseexit = current.exits[0]
            assert not falseexit.exitcase
            trueexit = current.exits[1]
            targetblock = falseexit.target
            if len(entrymap[targetblock]) != 1:
                break
            if checkvar not in falseexit.args:
                break
            newcheckvar = targetblock.inputargs[falseexit.args.index(checkvar)]
            if not is_chain_block(targetblock):
                break
            if newcheckvar not in targetblock.operations[0].args:
                break
            for i, var in enumerate(trueexit.args):
                add_to_varmap(var, trueexit.target.inputargs[i])
            for i, var in enumerate(falseexit.args):
                add_to_varmap(var, falseexit.target.inputargs[i])
            current = targetblock
        if len(chain) > 1:
            break
    else:
        return False
    merge_chain(chain, checkvars[0], varmap, graph)
    checkgraph(graph)
    return True
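merge_if_blocks_once() relies on an is_chain_block() helper that this page does not show. Below is a hedged sketch of what such a predicate plausibly checks, inferred only from how the loop above uses it; the real helper in pypy.translator.backendopt.merge_if_blocks may test more than this:

# hedged sketch of an is_chain_block()-style predicate: the block's last
# operation compares exactly one Variable with one Constant, and its result
# drives the block's exitswitch
from pypy.objspace.flow.model import Constant

def is_chain_block_sketch(block, first=False):
    if not block.operations:
        return False
    if len(block.operations) > 1 and not first:
        return False
    op = block.operations[-1]
    if op.opname not in ('int_eq', 'uint_eq', 'char_eq', 'unichar_eq'):
        return False
    if isinstance(op.args[0], Constant) == isinstance(op.args[1], Constant):
        return False   # need exactly one Constant and one Variable argument
    return op.result is block.exitswitch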
Example #13
def join_blocks(graph):
    """Links can be deleted if they are the single exit of a block and
    the single entry point of the next block.  When this happens, we can
    append all the operations of the following block to the preceding
    block (but renaming variables with the appropriate arguments).
    """
    if simplify_disabled(graph): return
    entrymap = mkentrymap(graph)
    block = graph.startblock
    seen = {block: True}
    stack = list(block.exits)
    while stack:
        link = stack.pop()
        if (link.prevblock.exitswitch is None
                and len(entrymap[link.target]) == 1
                and link.target.exits):  # stop at the returnblock
            assert len(link.prevblock.exits) == 1
            renaming = {}
            for vprev, vtarg in zip(link.args, link.target.inputargs):
                renaming[vtarg] = vprev

            def rename(v):
                return renaming.get(v, v)

            def rename_op(op):
                args = [rename(a) for a in op.args]
                op = SpaceOperation(op.opname, args, rename(op.result),
                                    op.offset)
                # special case...
                if op.opname == 'indirect_call':
                    if isinstance(op.args[0], Constant):
                        assert isinstance(op.args[-1], Constant)
                        del op.args[-1]
                        op.opname = 'direct_call'
                return op

            for op in link.target.operations:
                link.prevblock.operations.append(rename_op(op))
            exits = []
            for exit in link.target.exits:
                newexit = exit.copy(rename)
                exits.append(newexit)
            newexitswitch = rename(link.target.exitswitch)
            link.prevblock.exitswitch = newexitswitch
            link.prevblock.recloseblock(*exits)
            if isinstance(newexitswitch,
                          Constant) and newexitswitch != c_last_exception:
                exits = replace_exitswitch_by_constant(link.prevblock,
                                                       newexitswitch)
            stack.extend(exits)
        else:
            if link.target not in seen:
                stack.extend(link.target.exits)
                seen[link.target] = True
Example #14
 def gen_graph(self, public=True):
     fun = self.functiongraph
     self.entrymap = mkentrymap(fun)
     currentlines = self.lines
     self.lines = []
     self.indent += 1 
     self.gen_block(fun.startblock)
     self.indent -= 1
     # emit the header after the body
     functionbodylines = self.lines
     self.lines = currentlines
     inputargnames = [ " ".join(self._paramvardecl(var)) for var in fun.getargs() ]
     params = ", ".join(inputargnames)
     returntype = self.get_type(fun.getreturnvar())
     returntypename = self._gettypename(returntype)
     try:
         function_object = self.by_the_way_the_function_was   # XXX!
     except AttributeError:
         def function_object(): pass   # XXX!!!
     if public:
         # make the function visible from the outside
         # under its original name
         args = ', '.join([var.name for var in fun.getargs()])
         self.putline("def %s(%s):" % (fun.name.split('.')[-1], args))
         self.indent += 1
         self.putline("return %s(%s)" % (
             self.getfunctionname(function_object), args))
         self.indent -= 1
     # go ahead with the mangled header and body of the function
     self.putline("cdef %s %s(%s):" % (
         returntypename,
         self.getfunctionname(function_object),
         params))
     self.indent += 1
     #self.putline("# %r" % self.annotations)
     decllines = []
     missing_decl = []
     funargs = fun.getargs()
     for block in self.blockids:
         for var in block.getvariables():
             if var not in funargs:
                 decl = self._vardecl(var)
                 if decl:
                     decllines.append(decl)
                 else:
                     missing_decl.append(self.get_varname(var))
     if missing_decl:
         missing_decl.sort()
         decllines.append('# untyped variables: ' + ' '.join(missing_decl))
     decllines.sort()
     for decl in decllines:
         self.putline(decl)
     self.indent -= 1
     self.lines.extend(functionbodylines)
Example #15
 def test_multiple_catch_simple_call(self):
     graph = self.codetest(self.multiple_catch_simple_call)
     simplify_graph(graph)
     assert self.all_operations(graph) == {"simple_call": 1}
     entrymap = mkentrymap(graph)
     links = entrymap[graph.returnblock]
     assert len(links) == 3
     assert dict.fromkeys([link.exitcase for link in links]) == dict.fromkeys([None, IndexError, OSError])
     links = entrymap[graph.exceptblock]
     assert len(links) == 1
     assert links[0].exitcase is Exception
Example #16
 def test_multiple_catch_simple_call(self):
     graph = self.codetest(self.multiple_catch_simple_call)
     simplify_graph(graph)
     assert self.all_operations(graph) == {'simple_call': 1}
     entrymap = mkentrymap(graph)
     links = entrymap[graph.returnblock]
     assert len(links) == 3
     assert (dict.fromkeys([link.exitcase for link in links]) ==
             dict.fromkeys([None, IndexError, OSError]))
     links = entrymap[graph.exceptblock]
     assert len(links) == 1
     assert links[0].exitcase is Exception
Example #17
def join_blocks(graph):
    """Links can be deleted if they are the single exit of a block and
    the single entry point of the next block.  When this happens, we can
    append all the operations of the following block to the preceding
    block (but renaming variables with the appropriate arguments).
    """
    entrymap = mkentrymap(graph)
    block = graph.startblock
    seen = {block: True}
    stack = list(block.exits)
    while stack:
        link = stack.pop()
        if (
            link.prevblock.exitswitch is None and len(entrymap[link.target]) == 1 and link.target.exits
        ):  # stop at the returnblock
            assert len(link.prevblock.exits) == 1
            renaming = {}
            for vprev, vtarg in zip(link.args, link.target.inputargs):
                renaming[vtarg] = vprev

            def rename(v):
                return renaming.get(v, v)

            def rename_op(op):
                args = [rename(a) for a in op.args]
                op = SpaceOperation(op.opname, args, rename(op.result), op.offset)
                # special case...
                if op.opname == "indirect_call":
                    if isinstance(op.args[0], Constant):
                        assert isinstance(op.args[-1], Constant)
                        del op.args[-1]
                        op.opname = "direct_call"
                return op

            for op in link.target.operations:
                link.prevblock.operations.append(rename_op(op))
            exits = []
            for exit in link.target.exits:
                newexit = exit.copy(rename)
                exits.append(newexit)
            newexitswitch = rename(link.target.exitswitch)
            link.prevblock.exitswitch = newexitswitch
            link.prevblock.recloseblock(*exits)
            if isinstance(newexitswitch, Constant) and newexitswitch != c_last_exception:
                exits = replace_exitswitch_by_constant(link.prevblock, newexitswitch)
            stack.extend(exits)
        else:
            if link.target not in seen:
                stack.extend(link.target.exits)
                seen[link.target] = True
Example #18
def transform_ovfcheck(graph):
    """The special function calls ovfcheck and ovfcheck_lshift need to
    be translated into primitive operations. ovfcheck is called directly
    after an operation that should be turned into an overflow-checked
    version. It is considered a syntax error if the resulting <op>_ovf
    is not defined in objspace/flow/objspace.py.
    ovfcheck_lshift is special because there is no preceding operation.
    Instead, it will be replaced by an OP_LSHIFT_OVF operation.
    """
    from pypy.rlib.rarithmetic import ovfcheck, ovfcheck_lshift
    from pypy.objspace.flow.objspace import implicit_exceptions
    covf = Constant(ovfcheck)
    covfls = Constant(ovfcheck_lshift)

    def check_syntax(opname):
        exlis = implicit_exceptions.get("%s_ovf" % (opname, ), [])
        if OverflowError not in exlis:
            raise Exception("ovfcheck in %s: Operation %s has no"
                            " overflow variant" % (graph.name, opname))

    for block in graph.iterblocks():
        for i in range(len(block.operations) - 1, -1, -1):
            op = block.operations[i]
            if op.opname != 'simple_call':
                continue
            if op.args[0] == covf:
                if i == 0:
                    # hard case: ovfcheck() on an operation that occurs
                    # in the previous block, like 'floordiv'.  The generic
                    # exception handling around the ovfcheck() is enough
                    # to cover all cases; kill the one around the previous op.
                    entrymap = mkentrymap(graph)
                    links = entrymap[block]
                    assert len(links) == 1
                    prevblock = links[0].prevblock
                    assert prevblock.exits[0].target is block
                    prevblock.exitswitch = None
                    prevblock.exits = (links[0], )
                    join_blocks(graph)  # merge the two blocks together
                    transform_ovfcheck(graph)  # ...and try again
                    return
                op1 = block.operations[i - 1]
                check_syntax(op1.opname)
                op1.opname += '_ovf'
                del block.operations[i]
                block.renamevariables({op.result: op1.result})
            elif op.args[0] == covfls:
                op.opname = 'lshift_ovf'
                del op.args[0]
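At the RPython level, the pattern this transformer looks for is an arithmetic operation wrapped directly in ovfcheck(). The flow graph of the function below contains an 'add' operation immediately followed by the ovfcheck() call, which the code above rewrites into a single 'add_ovf' operation:

# illustrative RPython-level use of ovfcheck(); overflow is turned into an
# OverflowError that the caller handles explicitly
from pypy.rlib.rarithmetic import ovfcheck

def safe_add(x, y):
    try:
        return ovfcheck(x + y)
    except OverflowError:
        return -1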
Example #19
def transform_ovfcheck(graph):
    """The special function calls ovfcheck and ovfcheck_lshift need to
    be translated into primitive operations. ovfcheck is called directly
    after an operation that should be turned into an overflow-checked
    version. It is considered a syntax error if the resulting <op>_ovf
    is not defined in objspace/flow/objspace.py.
    ovfcheck_lshift is special because there is no preceding operation.
    Instead, it will be replaced by an OP_LSHIFT_OVF operation.
    """
    from pypy.rlib.rarithmetic import ovfcheck, ovfcheck_lshift
    from pypy.objspace.flow.objspace import implicit_exceptions
    covf = Constant(ovfcheck)
    covfls = Constant(ovfcheck_lshift)

    def check_syntax(opname):
        exlis = implicit_exceptions.get("%s_ovf" % (opname,), [])
        if OverflowError not in exlis:
            raise Exception("ovfcheck in %s: Operation %s has no"
                            " overflow variant" % (graph.name, opname))

    for block in graph.iterblocks():
        for i in range(len(block.operations)-1, -1, -1):
            op = block.operations[i]
            if op.opname != 'simple_call':
                continue
            if op.args[0] == covf:
                if i == 0:
                    # hard case: ovfcheck() on an operation that occurs
                    # in the previous block, like 'floordiv'.  The generic
                    # exception handling around the ovfcheck() is enough
                    # to cover all cases; kill the one around the previous op.
                    entrymap = mkentrymap(graph)
                    links = entrymap[block]
                    assert len(links) == 1
                    prevblock = links[0].prevblock
                    assert prevblock.exits[0].target is block
                    prevblock.exitswitch = None
                    prevblock.exits = (links[0],)
                    join_blocks(graph)         # merge the two blocks together
                    transform_ovfcheck(graph)  # ...and try again
                    return
                op1 = block.operations[i-1]
                check_syntax(op1.opname)
                op1.opname += '_ovf'
                del block.operations[i]
                block.renamevariables({op.result: op1.result})
            elif op.args[0] == covfls:
                op.opname = 'lshift_ovf'
                del op.args[0]
Example #20
def mkinsideentrymap(graph_or_blocks):
    # graph_or_blocks can be a full FunctionGraph, or a mapping
    # {block: reachable-from-outside-flag}.
    if isinstance(graph_or_blocks, dict):
        blocks = graph_or_blocks
        entrymap = {}
        for block in blocks:
            for link in block.exits:
                if link.target in blocks and not blocks[link.target]:
                    entrymap.setdefault(link.target, []).append(link)
        return entrymap
    else:
        graph = graph_or_blocks
        entrymap = mkentrymap(graph)
        del entrymap[graph.startblock]
        return entrymap
Example #21
def mkinsideentrymap(graph_or_blocks):
    # graph_or_blocks can be a full FunctionGraph, or a mapping
    # {block: reachable-from-outside-flag}.
    if isinstance(graph_or_blocks, dict):
        blocks = graph_or_blocks
        entrymap = {}
        for block in blocks:
            for link in block.exits:
                if link.target in blocks and not blocks[link.target]:
                    entrymap.setdefault(link.target, []).append(link)
        return entrymap
    else:
        graph = graph_or_blocks
        entrymap = mkentrymap(graph)
        del entrymap[graph.startblock]
        return entrymap
Example #22
    def test_simple_struct(self):
        S0 = RStruct('S0', [('x', rc_int)])
        def func():
            s = S0.allocate()
            s.ref_x().set_value(12)
            return s.ref_x().get_value()

        interp, graph = get_interpreter(func, [], policy=POLICY,
                                        backendopt=True)
        res = interp.eval_graph(graph, [])
        assert res == 12
        # after inlining the get_value() call, there is a getarrayitem
        # at the end of the main graph.  However, the memory it accesses
        # must be protected by a following keepalive...
        entrymap = mkentrymap(graph)
        [link] = entrymap[graph.returnblock]
        assert link.prevblock.operations[-1].opname == 'keepalive'
Example #23
def constant_diffuse(graph):
    count = 0
    # after 'exitswitch vexit', replace 'vexit' with the corresponding constant
    # if it also appears on the outgoing links
    for block in graph.iterblocks():
        vexit = block.exitswitch
        if isinstance(vexit, Variable):
            for link in block.exits:
                if vexit in link.args and link.exitcase != 'default':
                    remap = {
                        vexit: Constant(link.llexitcase, vexit.concretetype)
                    }
                    link.args = [remap.get(v, v) for v in link.args]
                    count += 1
    # if the same constants appear at the same positions in all links
    # into a block, remove them from the links, remove the corresponding
    # input variables, and introduce equivalent same_as operations at the
    # beginning of the block; then try to fold the block further
    for block, links in mkentrymap(graph).iteritems():
        if block is graph.startblock:
            continue
        if block.exits == ():
            continue
        firstlink = links[0]
        rest = links[1:]
        diffuse = []
        for i, c in enumerate(firstlink.args):
            if not isinstance(c, Constant):
                continue
            for lnk in rest:
                if lnk.args[i] != c:
                    break
            else:
                diffuse.append((i, c))
        diffuse.reverse()
        same_as = []
        for i, c in diffuse:
            for lnk in links:
                del lnk.args[i]
            v = block.inputargs.pop(i)
            same_as.append(SpaceOperation('same_as', [c], v))
            count += 1
        block.operations = same_as + block.operations
        if same_as:
            constant_fold_block(block)
    return count
Example #24
def constant_diffuse(graph):
    count = 0
    # after 'exitswitch vexit', replace 'vexit' with the corresponding constant
    # if it also appears on the outgoing links
    for block in graph.iterblocks():
        vexit = block.exitswitch
        if isinstance(vexit, Variable):
            for link in block.exits:
                if vexit in link.args and link.exitcase != 'default':
                    remap = {vexit: Constant(link.llexitcase,
                                             vexit.concretetype)}
                    link.args = [remap.get(v, v) for v in link.args]
                    count += 1
    # if the same constants appear at the same positions in all links
    # into a block, remove them from the links, remove the corresponding
    # input variables, and introduce equivalent same_as operations at the
    # beginning of the block; then try to fold the block further
    for block, links in mkentrymap(graph).iteritems():
        if block is graph.startblock:
            continue
        if block.exits == ():
            continue
        firstlink = links[0]
        rest = links[1:]
        diffuse = []
        for i, c in enumerate(firstlink.args):
            if not isinstance(c, Constant):
                continue
            for lnk in rest:
                if lnk.args[i] != c:
                    break
            else:
                diffuse.append((i, c))
        diffuse.reverse()
        same_as = []
        for i, c in diffuse:
            for lnk in links:
                del lnk.args[i]
            v = block.inputargs.pop(i)
            same_as.append(SpaceOperation('same_as', [c], v))
            count += 1
        block.operations = same_as + block.operations
        if same_as:
            constant_fold_block(block)
    return count
Example #25
 def compute_merge_points(self):
     entrymap = mkentrymap(self.graph)
     startblock = self.graph.startblock
     global_merge_blocks = {}
     for block in self.graph.iterblocks():
         if not block.operations:
             continue
         op = block.operations[0]
         hashint = False
         cand = 0
         if (op.opname == 'hint' and
             op.args[1].value == {'global_merge_point': True}):
             hashint = True
             if block is startblock or len(entrymap[block]) > 1:
                 global_merge_blocks[block] = True
                 cand += 1
             else:
                 prevblock = entrymap[block][0].prevblock
                 if len(entrymap[prevblock]) > 1:
                     global_merge_blocks[prevblock] = True
                     cand += 1
         #op = block.operations[-1]
         #if (op.opname == 'hint' and
         #    op.args[1].value == {'global_merge_point': True}):
         #    hashint = True
         #    for link in block.exits:
         #        if len(entrymap[link.target]) > 1:
         #            global_merge_blocks[link.target] = True
         #            cand += 1
         assert not hashint or cand == 1, (
             "ambiguous global merge point hint: %r" % block)
         for op in block.operations[1:]:
             assert not (op.opname == 'hint' and
                 op.args[1].value == {'global_merge_point': True}), (
                 "stranded global merge point hint: %r" % block)
             
     for block, links in entrymap.items():
         if len(links) > 1 and block is not self.graph.returnblock:
             if block in global_merge_blocks:
                 self.mergepoint_set[block] = 'global'
             else:
                 self.mergepoint_set[block] = 'local'
     if startblock in global_merge_blocks:
         self.mergepoint_set[startblock] = 'global'
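The analysis above looks for 'hint' operations whose second argument is the constant dict {'global_merge_point': True}. A hedged illustration of the source-level call that produces such an operation follows; the import path and the surrounding interpreter loop are assumptions, not taken from this page:

# hedged illustration: the old timeshifting JIT marked its interpreter loop
# with a hint() call carrying global_merge_point=True; interpret_one_step()
# and state.done are hypothetical
from pypy.rlib.jit import hint   # assumed location of hint()

def portal(state):
    while not state.done:                      # 'done' is hypothetical
        hint(None, global_merge_point=True)
        state = interpret_one_step(state)      # hypothetical helper
    return state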
Example #26
 def inline_once(self, block, index_operation):
     self.varmap = {}
     self._copied_blocks = {}
     self.op = block.operations[index_operation]
     self.graph_to_inline = self.get_graph_from_op(self.op)
     self.exception_guarded = False
     if (block.exitswitch == c_last_exception and
         index_operation == len(block.operations) - 1):
         self.exception_guarded = True
         if self.inline_guarded_calls:
             if (not self.inline_guarded_calls_no_matter_what and 
                 does_raise_directly(self.graph_to_inline, self.raise_analyzer)):
                 raise CannotInline("can't inline because the call is exception guarded")
         elif any_call_to_raising_graphs(self.graph_to_inline,
                                         self.translator, self.raise_analyzer):
             raise CannotInline("can't handle exceptions")
     self._passon_vars = {}
     self.entrymap = mkentrymap(self.graph_to_inline)
     self.do_inline(block, index_operation)
Example #27
 def inline_once(self, block, index_operation):
     self.varmap = {}
     self._copied_blocks = {}
     self.op = block.operations[index_operation]
     self.graph_to_inline = self.get_graph_from_op(self.op)
     self.exception_guarded = False
     if (block.exitswitch == c_last_exception and
         index_operation == len(block.operations) - 1):
         self.exception_guarded = True
         if self.inline_guarded_calls:
             if (not self.inline_guarded_calls_no_matter_what and 
                 does_raise_directly(self.graph_to_inline, self.raise_analyzer)):
                 raise CannotInline("can't inline because the call is exception guarded")
         elif any_call_to_raising_graphs(self.graph_to_inline,
                                         self.translator, self.raise_analyzer):
             raise CannotInline("can't handle exceptions")
     self._passon_vars = {}
     self.entrymap = mkentrymap(self.graph_to_inline)
     self.do_inline(block, index_operation)
Example #28
def remove_tail_calls_to_self(translator, graph):
    entrymap = mkentrymap(graph)
    changed = False
    for link in entrymap[graph.returnblock]:
        block = link.prevblock
        if (len(block.exits) == 1 and len(block.operations) > 0
                and block.operations[-1].opname == 'direct_call'
                and block.operations[-1].result == link.args[0]):
            call = get_graph(block.operations[-1].args[0], translator)
            print "getgraph", call
            # only rewrite genuine tail calls to this very graph
            if call is graph:
                _remove_tail_call(translator, graph, block)
                changed = True
    if changed:
        from pypy.translator import simplify
        checkgraph(graph)
        simplify.remove_identical_vars(graph)
        simplify.eliminate_empty_blocks(graph)
        simplify.join_blocks(graph)
Example #29
def remove_tail_calls_to_self(translator, graph):
    entrymap = mkentrymap(graph)
    changed = False
    for link in entrymap[graph.returnblock]:
        block = link.prevblock
        if (len(block.exits) == 1 and
            len(block.operations) > 0 and
            block.operations[-1].opname == 'direct_call' and
            block.operations[-1].result == link.args[0]):
            call = get_graph(block.operations[-1].args[0], translator)
            print "getgraph", call
            # only rewrite genuine tail calls to this very graph
            if call is graph:
                _remove_tail_call(translator, graph, block)
                changed = True
    if changed:
        from pypy.translator import simplify
        checkgraph(graph)
        simplify.remove_identical_vars(graph)
        simplify.eliminate_empty_blocks(graph)
        simplify.join_blocks(graph)
Example #30
    def get_phi_data(self, block):
        data = []
        
        entrylinks = mkentrymap(self.graph)[block]
        entrylinks = [x for x in entrylinks if x.prevblock is not None]

        inputargs = self.db.repr_arg_multi(block.inputargs)
        inputargtypes = self.db.repr_arg_type_multi(block.inputargs)

        # for each argument of the block, return a 4-tuple of
        # (arg_name, arg_type, [names from the previous blocks],
        #  [names of the corresponding previous blocks])
        for ii, (arg, type_) in enumerate(zip(inputargs, inputargtypes)):

            names = self.db.repr_arg_multi([link.args[ii]
                                            for link in entrylinks])

            blocknames = [self.block_to_name[link.prevblock]
                          for link in entrylinks]

            assert len(names) == len(blocknames)
            data.append((arg, type_, names, blocknames))

        return data
Example #31
def compile_graph(rgenop, graph, random_seed=0):
    FUNC = lltype.FuncType([v.concretetype for v in graph.getargs()],
                           graph.getreturnvar().concretetype)
    sigtoken = rgenop.sigToken(FUNC)
    builder, gv_entrypoint, args_gv = rgenop.newgraph(
        sigtoken, "compiled_%s" % (graph.name, ))

    def varkind(v):
        return rgenop.kindToken(v.concretetype)

    def var2gv(v):
        if isinstance(v, flowmodel.Variable):
            return varmap[v]
        else:
            return rgenop.genconst(v.value)

    map(varkind, graph.getargs())  # for the py.test.skip() in some backends
    pending_blocks = [(graph.startblock, builder, args_gv)]
    more_pending_blocks = []
    entrymap = flowmodel.mkentrymap(graph)
    entrymap[graph.returnblock] = "force a label"
    labels = {graph.returnblock: None}
    r = random.Random(random_seed)

    while pending_blocks or more_pending_blocks:
        if not pending_blocks:
            r.shuffle(more_pending_blocks)
            pending_blocks = more_pending_blocks
            more_pending_blocks = []
        block, builder, args_gv = pending_blocks.pop()
        builder.start_writing()

        # the following loop generates a chain of blocks
        # (a branch in the graph)
        while True:
            assert len(args_gv) == len(block.inputargs)
            if len(entrymap[block]) > 1:
                # need a label at the start of this block
                if block in labels:
                    # already got one, jump to it
                    label = labels[block]
                    if label is not None:
                        builder.finish_and_goto(args_gv, labels[block])
                    else:
                        [retvar] = args_gv
                        builder.finish_and_return(sigtoken, retvar)
                    break  # done along this branch
                else:
                    # create a label and proceed
                    kinds = map(varkind, block.inputargs)
                    labels[block] = builder.enter_next_block(kinds, args_gv)

            # generate the operations
            varmap = dict(zip(block.inputargs, args_gv))
            for op in block.operations:
                gv_result = generate_operation(rgenop, builder, op, var2gv)
                varmap[op.result] = gv_result

            if block.exitswitch is None:
                [link] = block.exits
            else:
                if block.exitswitch.concretetype is not lltype.Bool:
                    raise NotImplementedError("XXX switches")
                i = r.randrange(0, 2)
                jumplink = block.exits[i]
                args_gv = map(var2gv, jumplink.args)
                if jumplink.exitcase:
                    meth = builder.jump_if_true
                else:
                    meth = builder.jump_if_false
                vars_gv = {}
                for v in args_gv:
                    if not v.is_const:
                        vars_gv[v] = True
                newbuilder = meth(varmap[block.exitswitch], vars_gv.keys())
                more_pending_blocks.append(
                    (jumplink.target, newbuilder, args_gv))
                link = block.exits[1 - i]

            args_gv = map(var2gv, link.args)
            block = link.target

    builder.end()
    return gv_entrypoint
Example #32
def compile_graph(rgenop, graph, random_seed=0):
    FUNC = lltype.FuncType([v.concretetype for v in graph.getargs()],
                           graph.getreturnvar().concretetype)
    sigtoken = rgenop.sigToken(FUNC)
    builder, gv_entrypoint, args_gv = rgenop.newgraph(sigtoken,
                                         "compiled_%s" % (graph.name,))

    def varkind(v):
        return rgenop.kindToken(v.concretetype)

    def var2gv(v):
        if isinstance(v, flowmodel.Variable):
            return varmap[v]
        else:
            return rgenop.genconst(v.value)

    map(varkind, graph.getargs())    # for the py.test.skip() in some backends
    pending_blocks = [(graph.startblock, builder, args_gv)]
    more_pending_blocks = []
    entrymap = flowmodel.mkentrymap(graph)
    entrymap[graph.returnblock] = "force a label"
    labels = {graph.returnblock: None}
    r = random.Random(random_seed)

    while pending_blocks or more_pending_blocks:
        if not pending_blocks:
            r.shuffle(more_pending_blocks)
            pending_blocks = more_pending_blocks
            more_pending_blocks = []
        block, builder, args_gv = pending_blocks.pop()
        builder.start_writing()

        # the following loop generates a chain of blocks
        # (a branch in the graph)
        while True:
            assert len(args_gv) == len(block.inputargs)
            if len(entrymap[block]) > 1:
                # need a label at the start of this block
                if block in labels:
                    # already got one, jump to it
                    label = labels[block]
                    if label is not None:
                        builder.finish_and_goto(args_gv, labels[block])
                    else:
                        [retvar] = args_gv
                        builder.finish_and_return(sigtoken, retvar)
                    break    # done along this branch
                else:
                    # create a label and proceed
                    kinds = map(varkind, block.inputargs)
                    labels[block] = builder.enter_next_block(kinds, args_gv)

            # generate the operations
            varmap = dict(zip(block.inputargs, args_gv))
            for op in block.operations:
                gv_result = generate_operation(rgenop, builder, op, var2gv)
                varmap[op.result] = gv_result

            if block.exitswitch is None:
                [link] = block.exits
            else:
                if block.exitswitch.concretetype is not lltype.Bool:
                    raise NotImplementedError("XXX switches")
                i = r.randrange(0, 2)
                jumplink = block.exits[i]
                args_gv = map(var2gv, jumplink.args)
                if jumplink.exitcase:
                    meth = builder.jump_if_true
                else:
                    meth = builder.jump_if_false
                vars_gv = {}
                for v in args_gv:
                    if not v.is_const:
                        vars_gv[v] = True
                newbuilder = meth(varmap[block.exitswitch], vars_gv.keys())
                more_pending_blocks.append((jumplink.target,
                                            newbuilder,
                                            args_gv))
                link = block.exits[1-i]

            args_gv = map(var2gv, link.args)
            block = link.target

    builder.end()
    return gv_entrypoint
Example #33
def merge_if_blocks_once(graph):
    """Convert consecutive blocks that all compare a variable (of Primitive type)
    with a constant into one block with multiple exits. The backends can in
    turn output this block as a switch statement.
    """
    candidates = [
        block for block in graph.iterblocks()
        if is_chain_block(block, first=True)
    ]
    entrymap = mkentrymap(graph)
    for firstblock in candidates:
        chain = []
        checkvars = []
        varmap = {}  # {var in a block in the chain: var in the first block}
        for var in firstblock.exits[0].args:
            varmap[var] = var
        for var in firstblock.exits[1].args:
            varmap[var] = var

        def add_to_varmap(var, newvar):
            if isinstance(var, Variable):
                varmap[newvar] = varmap[var]
            else:
                varmap[newvar] = var

        current = firstblock
        while 1:
            # check whether the chain can be extended with the block that follows the
            # False link
            checkvar = [
                var for var in current.operations[-1].args
                if isinstance(var, Variable)
            ][0]
            resvar = current.operations[-1].result
            case = [
                var for var in current.operations[-1].args
                if isinstance(var, Constant)
            ][0]
            checkvars.append(checkvar)
            falseexit = current.exits[0]
            assert not falseexit.exitcase
            trueexit = current.exits[1]
            targetblock = falseexit.target
            # if the result of the check is also passed through the link, we
            # cannot construct the chain
            if resvar in falseexit.args or resvar in trueexit.args:
                break
            chain.append((current, case))
            if len(entrymap[targetblock]) != 1:
                break
            if checkvar not in falseexit.args:
                break
            newcheckvar = targetblock.inputargs[falseexit.args.index(checkvar)]
            if not is_chain_block(targetblock):
                break
            if newcheckvar not in targetblock.operations[0].args:
                break
            for i, var in enumerate(trueexit.args):
                add_to_varmap(var, trueexit.target.inputargs[i])
            for i, var in enumerate(falseexit.args):
                add_to_varmap(var, falseexit.target.inputargs[i])
            current = targetblock
        if len(chain) > 1:
            break
    else:
        return False
    merge_chain(chain, checkvars[0], varmap, graph)
    checkgraph(graph)
    return True