def create_exception_handling(self, graph):
    """After an exception in a direct_call (or indirect_call), that is
    not caught by an explicit except statement, we need to reraise the
    exception.  So after this direct_call we need to test if an
    exception had occurred.  If so, we return from the current graph
    with a special value (False/-1/-1.0/null).  Because of the added
    exitswitch we need an additional block.

    Returns (n_need_exc_matching_blocks, n_gen_exc_checks), or None if
    the graph was already transformed.
    """
    # a graph is transformed at most once; if it already was, it must
    # have been done with the same exception-data pointer
    if hasattr(graph, 'exceptiontransformed'):
        assert self.same_obj(self.exc_data_ptr, graph.exceptiontransformed)
        return
    else:
        self.raise_analyzer.analyze_direct_call(graph)
        graph.exceptiontransformed = self.exc_data_ptr

    join_blocks(graph)
    # collect the blocks before changing them
    n_need_exc_matching_blocks = 0
    n_gen_exc_checks = 0
    #
    # first rewrite every link that jumps directly to the exceptblock
    entrymap = mkentrymap(graph)
    if graph.exceptblock in entrymap:
        for link in entrymap[graph.exceptblock]:
            self.transform_jump_to_except_block(graph, entrymap, link)
    #
    # iterate over a snapshot (list(...)) -- transform_block()
    # apparently adds/changes blocks while we walk
    for block in list(graph.iterblocks()):
        self.replace_fetch_restore_operations(block)
        need_exc_matching, gen_exc_checks = self.transform_block(
            graph, block)
        n_need_exc_matching_blocks += need_exc_matching
        n_gen_exc_checks += gen_exc_checks
    cleanup_graph(graph)
    return n_need_exc_matching_blocks, n_gen_exc_checks
def test_replace_exitswitch_by_constant_bug():
    """Regression test: constant-folding an exitswitch after inlining
    and malloc removal must not corrupt the graph."""
    class X:
        pass

    def constant9():
        obj = X()
        obj.n = 3
        obj.n = 9
        return obj.n

    def fn():
        n = constant9()
        if n == 1:
            return 5
        elif n == 2:
            return 6
        elif n == 3:
            return 8
        elif n == 4:
            return -123
        elif n == 5:
            return 12973
        else:
            return n

    # annotate and rtype the entry point
    ctx = TranslationContext()
    annotator = ctx.buildannotator()
    annotator.build_types(fn, [])
    rtyper = ctx.buildrtyper()
    rtyper.specialize()
    entry_graph = ctx.graphs[0]
    remove_same_as(entry_graph)
    merge_if_blocks_once(entry_graph)
    # inline constant9() and remove the X() malloc, then re-simplify
    from rpython.translator.backendopt import malloc, inline
    inline.auto_inlining(ctx, 20)
    malloc.remove_mallocs(ctx, ctx.graphs)
    from rpython.translator import simplify
    simplify.join_blocks(entry_graph)
def create_exception_handling(self, graph):
    """After an exception in a direct_call (or indirect_call), that is
    not caught by an explicit except statement, we need to reraise the
    exception.  So after this direct_call we need to test if an
    exception had occurred.  If so, we return from the current graph
    with a special value (False/-1/-1.0/null).  Because of the added
    exitswitch we need an additional block.

    Returns (n_need_exc_matching_blocks, n_gen_exc_checks), or None if
    the graph was already transformed.
    """
    # idempotence guard: a second call must use the same exc_data_ptr
    if hasattr(graph, 'exceptiontransformed'):
        assert self.same_obj(self.exc_data_ptr, graph.exceptiontransformed)
        return
    else:
        self.raise_analyzer.analyze_direct_call(graph)
        graph.exceptiontransformed = self.exc_data_ptr

    join_blocks(graph)
    # collect the blocks before changing them
    n_need_exc_matching_blocks = 0
    n_gen_exc_checks = 0
    #
    # rewrite all links jumping straight to the graph's exceptblock
    entrymap = mkentrymap(graph)
    if graph.exceptblock in entrymap:
        for link in entrymap[graph.exceptblock]:
            self.transform_jump_to_except_block(graph, entrymap, link)
    #
    # snapshot the block list: the loop body apparently mutates the graph
    for block in list(graph.iterblocks()):
        self.replace_fetch_restore_operations(block)
        need_exc_matching, gen_exc_checks = self.transform_block(graph, block)
        n_need_exc_matching_blocks += need_exc_matching
        n_gen_exc_checks += gen_exc_checks
    cleanup_graph(graph)
    return n_need_exc_matching_blocks, n_gen_exc_checks
def test_wearetranslated(self):
    """The whole function should fold away to a trivial graph."""
    from rpython.translator.simplify import join_blocks
    g = self.codetest(self.wearetranslated)
    join_blocks(g)
    # 'g' must now be an empty graph: no operations in the startblock,
    # and a single exit leading straight to the returnblock
    assert len(g.startblock.operations) == 0
    assert len(g.startblock.exits) == 1
    assert g.startblock.exits[0].target is g.returnblock
def test_specialcases(self):
    """All 14 comparison special cases survive as binary operations."""
    from rpython.translator.simplify import join_blocks
    g = self.codetest(self.specialcases)
    join_blocks(g)
    assert len(g.startblock.operations) == 14
    comparison_ops = ('lt', 'le', 'eq', 'ne', 'gt', 'ge', 'is_', 'xor')
    for op in g.startblock.operations:
        # every op is a two-argument comparison against the constant 3
        assert op.opname in comparison_ops
        assert len(op.args) == 2
        assert op.args[1].value == 3
def virtualize_mallocs(translator, graphs, verbose=False):
    """Run malloc virtualization to a fixpoint over 'graphs', then
    register any newly created graphs on the translator."""
    # work on a copy: the virtualizer may append freshly built graphs
    worklist = list(graphs)
    virtualizer = MallocVirtualizer(worklist, translator.rtyper, verbose)
    while virtualizer.remove_mallocs_once():
        pass
    for g in worklist:
        checkgraph(g)
        join_blocks(g)
    # the original graphs must still sit untouched at the front;
    # everything after them is new
    ncommon = len(graphs)
    assert worklist[:ncommon] == graphs
    del worklist[:ncommon]
    translator.graphs.extend(worklist)
def remove_duplicate_casts(graph, translator):
    """Remove redundant cast_pointer operations from 'graph'.

    Three passes over every block:
      1. collapse chains of casts: a cast of a cast is retargeted at the
         original variable, and a cast back to the original type becomes
         a plain copy (same_as);
      2. deduplicate identical (source, target-type) casts;
      3. delete casts whose result is never used afterwards.

    Returns the number of cast_pointers removed.
    """
    simplify.join_blocks(graph)
    num_removed = 0
    # remove chains of casts
    for block in graph.iterblocks():
        comes_from = {}  # cast result -> original (uncast) variable
        for op in block.operations:
            if op.opname == "cast_pointer":
                if op.args[0] in comes_from:
                    from_var = comes_from[op.args[0]]
                    comes_from[op.result] = from_var
                    if from_var.concretetype == op.result.concretetype:
                        # cast back to the original type: it's a pure copy
                        op.opname = "same_as"
                        op.args = [from_var]
                        num_removed += 1
                    else:
                        op.args = [from_var]
                else:
                    comes_from[op.result] = op.args[0]
    if num_removed:
        remove_same_as(graph)
    # remove duplicate casts
    for block in graph.iterblocks():
        available = {}  # (source var, target type) -> earlier cast's result
        for op in block.operations:
            if op.opname == "cast_pointer":
                key = (op.args[0], op.result.concretetype)
                if key in available:
                    op.opname = "same_as"
                    op.args = [available[key]]
                    num_removed += 1
                else:
                    available[key] = op.result
    if num_removed:
        remove_same_as(graph)
    # remove casts with unused results: walk each block backwards,
    # tracking the variables consumed later in the block or by its exits
    # (a set instead of the previous dict-used-as-set, and no throwaway
    # list copy of the enumerated operations)
    for block in graph.iterblocks():
        used = set()
        for link in block.exits:
            used.update(link.args)
        for i in range(len(block.operations) - 1, -1, -1):
            op = block.operations[i]
            if op.opname == "cast_pointer" and op.result not in used:
                del block.operations[i]
                num_removed += 1
            else:
                used.update(op.args)
    if translator.config.translation.verbose:
        log.removecasts("removed %s cast_pointers in %s" % (num_removed,
                                                            graph.name))
    return num_removed
def remove_duplicate_casts(graph, translator):
    """Fold away redundant cast_pointer operations in 'graph' and
    return how many were removed."""
    simplify.join_blocks(graph)
    removed = 0
    # pass 1: chains of casts -- retarget every cast at the earliest
    # ancestor variable; a cast landing back on its own type is a copy
    for blk in graph.iterblocks():
        origin = {}
        for op in blk.operations:
            if op.opname != "cast_pointer":
                continue
            src = op.args[0]
            if src in origin:
                root = origin[src]
                origin[op.result] = root
                op.args = [root]
                if root.concretetype == op.result.concretetype:
                    op.opname = "same_as"
                    removed += 1
            else:
                origin[op.result] = src
    if removed:
        remove_same_as(graph)
    # pass 2: within a block, a repeated (source, target-type) cast is
    # replaced by a copy of the first cast's result
    for blk in graph.iterblocks():
        seen = {}
        for op in blk.operations:
            if op.opname != "cast_pointer":
                continue
            key = (op.args[0], op.result.concretetype)
            if key in seen:
                op.opname = "same_as"
                op.args = [seen[key]]
                removed += 1
            else:
                seen[key] = op.result
    if removed:
        remove_same_as(graph)
    # pass 3: walk each block backwards and delete casts whose result
    # is not consumed later in the block nor passed along an exit link
    for blk in graph.iterblocks():
        consumed = {}
        for link in blk.exits:
            for arg in link.args:
                consumed[arg] = True
        idx = len(blk.operations)
        while idx > 0:
            idx -= 1
            op = blk.operations[idx]
            if op.opname == "cast_pointer" and op.result not in consumed:
                del blk.operations[idx]
                removed += 1
            else:
                for arg in op.args:
                    consumed[arg] = True
    if translator.config.translation.verbose:
        log.removecasts(
            "removed %s cast_pointers in %s" % (removed, graph.name))
    return removed
def remove_asserts(translator, graphs):
    # Remove the links to the exceptblock that raise AssertionError,
    # where possible, logging a summary at the end.
    rtyper = translator.rtyper
    excdata = rtyper.exceptiondata
    clsdef = translator.annotator.bookkeeper.getuniqueclassdef(AssertionError)
    # the prebuilt low-level exception instance for AssertionError,
    # compared by value against the second link argument below
    ll_AssertionError = excdata.get_standard_ll_exc_instance(rtyper, clsdef)
    # total_count[0]: asserts that could not be removed;
    # total_count[1]: asserts successfully removed
    total_count = [0, 0]
    for graph in graphs:
        count = 0
        morework = True
        while morework:
            morework = False
            eliminate_empty_blocks(graph)
            join_blocks(graph)
            # rescan from scratch after every successful removal --
            # kill_assertion_link() apparently invalidates the iteration
            for link in graph.iterlinks():
                if (link.target is graph.exceptblock and
                    isinstance(link.args[1], Constant) and
                    link.args[1].value == ll_AssertionError):
                    if kill_assertion_link(graph, link):
                        count += 1
                        morework = True
                        break
                    else:
                        total_count[0] += 1
                        if translator.config.translation.verbose:
                            log.removeassert(
                                "cannot remove an assert from %s" % (graph.name,))
        if count:
            # now melt away the (hopefully) dead operation that compute
            # the condition
            total_count[1] += count
            if translator.config.translation.verbose:
                log.removeassert("removed %d asserts in %s" % (count, graph.name))
            checkgraph(graph)
    total_count = tuple(total_count)
    # build the one-line summary message
    if total_count[0] == 0:
        if total_count[1] == 0:
            msg = None
        else:
            msg = "Removed %d asserts" % (total_count[1],)
    else:
        if total_count[1] == 0:
            msg = "Could not remove %d asserts" % (total_count[0],)
        else:
            msg = "Could not remove %d asserts, but removed %d asserts." % total_count
    if msg is not None:
        log.removeassert(msg)
def transform_func(self, fn, inputtypes, backendopt=False):
    """Annotate, rtype and exception-transform 'fn'; return the
    translation context and the transformed graph."""
    ctx = TranslationContext()
    ctx.buildannotator().build_types(fn, inputtypes)
    ctx.buildrtyper().specialize()
    if option.view:
        ctx.view()
    if backendopt:
        backend_optimizations(ctx)
    graph = graphof(ctx, fn)
    # run the exception transformation, then re-join the blocks it split
    exceptiontransform.ExceptionTransformer(ctx).create_exception_handling(graph)
    join_blocks(graph)
    if option.view:
        ctx.view()
    return ctx, graph
def remove_asserts(translator, graphs):
    """Try to remove the links raising AssertionError from every graph,
    logging per-graph progress and a final summary."""
    rtyper = translator.rtyper
    clsdef = translator.annotator.bookkeeper.getuniqueclassdef(AssertionError)
    # prebuilt low-level AssertionError instance to match links against
    ll_AssertionError = rtyper.exceptiondata.get_standard_ll_exc_instance(
        rtyper, clsdef)
    n_failed = 0       # asserts we could not remove
    n_removed = 0      # asserts successfully removed
    for graph in graphs:
        count = 0
        progress = True
        while progress:
            progress = False
            eliminate_empty_blocks(graph)
            join_blocks(graph)
            # after every successful removal, restart the scan
            for link in graph.iterlinks():
                if not (link.target is graph.exceptblock and
                        isinstance(link.args[1], Constant) and
                        link.args[1].value == ll_AssertionError):
                    continue
                if kill_assertion_link(graph, link):
                    count += 1
                    progress = True
                    break
                n_failed += 1
                if translator.config.translation.verbose:
                    log.removeassert(
                        "cannot remove an assert from %s" % (graph.name,))
        if count:
            # now melt away the (hopefully) dead operation that compute
            # the condition
            n_removed += count
            if translator.config.translation.verbose:
                log.removeassert(
                    "removed %d asserts in %s" % (count, graph.name))
            checkgraph(graph)
    # one-line summary
    if n_failed == 0:
        if n_removed == 0:
            msg = None
        else:
            msg = "Removed %d asserts" % (n_removed,)
    else:
        if n_removed == 0:
            msg = "Could not remove %d asserts" % (n_failed,)
        else:
            msg = ("Could not remove %d asserts, but removed %d asserts."
                   % (n_failed, n_removed))
    if msg is not None:
        log.removeassert(msg)
def remove_tail_calls_to_self(translator, graph):
    """Rewrite tail calls in 'graph' into jumps back to its start, then
    re-simplify the graph.

    A candidate block has a single exit into the returnblock, and ends
    in a direct_call whose result is exactly the value being returned.
    """
    entrymap = mkentrymap(graph)
    changed = False
    for link in entrymap[graph.returnblock]:
        block = link.prevblock
        if (len(block.exits) == 1 and
            len(block.operations) > 0 and
            block.operations[-1].opname == 'direct_call' and
            block.operations[-1].result == link.args[0]):
            # NOTE(review): the original code printed a debug line here
            # ('print "getgraph", graph') and guarded the rewrite with
            # the tautology 'if graph is graph:'.  Both removed; the
            # guard was presumably meant to check that the *called*
            # graph is this graph (i.e. a genuine self tail call) --
            # confirm before relying on this pass for other graphs.
            _remove_tail_call(translator, graph, block)
            changed = True
    if changed:
        from rpython.translator import simplify
        checkgraph(graph)
        simplify.remove_identical_vars(graph)
        simplify.eliminate_empty_blocks(graph)
        simplify.join_blocks(graph)
def __init__(self, translator, graph, inline_func, lltype_to_classdef,
             inline_guarded_calls=False,
             inline_guarded_calls_no_matter_what=False,
             raise_analyzer=None, call_count_pred=None, cleanup=True):
    """Inliner specialised on one target function: records, per block,
    which operation indices are calls to 'inline_func'."""
    BaseInliner.__init__(self, translator, graph, lltype_to_classdef,
                         inline_guarded_calls,
                         inline_guarded_calls_no_matter_what,
                         raise_analyzer, call_count_pred, cleanup)
    self.inline_func = inline_func
    # joining blocks first simplifies exception matching; the callsite
    # scan must therefore happen *after* join_blocks()
    join_blocks(graph)
    self.block_to_index = {}
    for called_graph, blk, opindex in find_callsites(graph, inline_func):
        self.block_to_index.setdefault(blk, {})[opindex] = called_graph
def test_tweak_generator_graph(self):
    """The tweaked generator entry graph yields a class whose next()
    method and bootstrap function are both flowable."""
    def f(n, x, y, z):
        z *= 10
        yield n + 1
        z -= 10

    entry_graph = make_generator_entry_graph(f)
    bootstrap_func = entry_graph._tweaked_func
    if option.view:
        entry_graph.show()
    GeneratorIterator = entry_graph._tweaked_func._generator_next_method_of_
    assert hasattr(GeneratorIterator, 'next')
    # the generated next() method must itself be flowable
    next_graph = build_flow(GeneratorIterator.next.im_func)
    join_blocks(next_graph)
    if option.view:
        next_graph.show()
    # ... and so must the bootstrap function
    bootstrap_graph = build_flow(bootstrap_func)
    if option.view:
        bootstrap_graph.show()
def test_tweak_generator_graph(self):
    """Manually assemble the generator-iterator class from a flow graph
    and check next() and the bootstrap function are both flowable."""
    def f(n, x, y, z):
        z *= 10
        yield n + 1
        z -= 10

    g = build_flow(f)
    GeneratorIterator = make_generatoriterator_class(g)
    replace_graph_with_bootstrap(GeneratorIterator, g)
    bootstrap_func = attach_next_method(GeneratorIterator, g)
    if option.view:
        g.show()
    # the attached next() method must point back at the iterator class
    assert bootstrap_func._generator_next_method_of_ is GeneratorIterator
    assert hasattr(GeneratorIterator, 'next')
    # the generated next() method must itself be flowable
    next_graph = build_flow(GeneratorIterator.next.im_func)
    join_blocks(next_graph)
    if option.view:
        next_graph.show()
    # ... and so must the bootstrap function
    bootstrap_graph = build_flow(bootstrap_func)
    if option.view:
        bootstrap_graph.show()
def add_enter_leave_roots_frame(graph, regalloc, c_gcdata):
    """Insert gc_enter_roots_frame / gc_leave_roots_frame operations
    around the regions of 'graph' that use gc_save_root/gc_restore_root.

    'regalloc' provides numcolors (the frame size); 'c_gcdata' is the
    Constant passed to gc_enter_roots_frame.  No-op if regalloc is None.
    """
    # put 'gc_enter_roots_frame' as late as possible, but before the
    # first 'gc_save_root' is reached.
    #
    # put the 'gc_leave_roots_frame' operations as early as possible,
    # that is, just after the last 'gc_restore_root' reached.  This is
    # done by putting it along a link, such that the previous block
    # contains a 'gc_restore_root' and from the next block it is not
    # possible to reach any extra 'gc_restore_root'; then, as doing
    # this is not as precise as we'd like, we first break every block
    # just after their last 'gc_restore_root'.
    if regalloc is None:
        return

    # break blocks after their last 'gc_restore_root', unless they
    # are already at the last position
    for block in graph.iterblocks():
        ops = block.operations
        for i in range(len(ops)-1, -1, -1):
            if ops[i].opname == 'gc_restore_root':
                if i < len(ops) - 1:
                    split_block(block, i + 1)
                break    # done
    insert_empty_startblock(graph)

    entrymap = mkentrymap(graph)

    # helpers

    def is_interesting_op(op):
        # "interesting" = restores a root, or saves one with a bitmask
        # other than "everything is free"
        if op.opname == 'gc_restore_root':
            return True
        if op.opname == 'gc_save_root':
            # ignore saves that say "everything is free"
            return not (isinstance(op.args[1], Constant) and
                        isinstance(op.args[1].value, int) and
                        op.args[1].value == bitmask_all_free)
        return False
    # closed over by is_interesting_op(); one bit per color, all set
    bitmask_all_free = (1 << regalloc.numcolors) - 1

    def insert_along_link(link, opname, args, cache):
        # redirect 'link' through a one-operation block; 'cache' shares
        # that block among all links aimed at the same target
        b2 = link.target
        if b2 not in cache:
            newblock = Block([v.copy() for v in b2.inputargs])
            newblock.operations.append(
                SpaceOperation(opname, args, varoftype(lltype.Void)))
            newblock.closeblock(Link(list(newblock.inputargs), b2))
            cache[b2] = newblock
        link.target = cache[b2]

    # make a list of blocks with gc_save_root/gc_restore_root in them
    interesting_blocks = []
    for block in graph.iterblocks():
        for op in block.operations:
            if is_interesting_op(op):
                assert block is not graph.startblock
                assert block is not graph.returnblock
                interesting_blocks.append(block)
                break    # interrupt this block, go to the next one

    # compute the blocks such that 'gc_save_root/gc_restore_root'
    # exist anywhere before the start of this block
    before_blocks = set()
    pending = list(interesting_blocks)
    seen = set(pending)
    while pending:
        block = pending.pop()
        for link in block.exits:
            before_blocks.add(link.target)
            if link.target not in seen:
                seen.add(link.target)
                pending.append(link.target)
    assert graph.startblock not in before_blocks

    # compute the blocks such that 'gc_save_root/gc_restore_root'
    # exist anywhere after the start of this block
    after_blocks = set(interesting_blocks)
    pending = list(interesting_blocks)
    while pending:
        block = pending.pop()
        for link in entrymap[block]:
            if link.prevblock is not None:
                if link.prevblock not in after_blocks:
                    after_blocks.add(link.prevblock)
                    pending.append(link.prevblock)
    assert graph.returnblock not in after_blocks

    # this is the set of blocks such that, at the start of the block,
    # we're "in frame", i.e. there are 'gc_save_root/gc_restore_root'
    # both before and after the start of the block.
    inside_blocks = before_blocks & after_blocks
    inside_or_interesting_blocks = set(interesting_blocks) | inside_blocks

    # if a block contains gc_save_root/gc_restore_root but is not
    # an "inside_block", then add gc_enter_roots_frame where needed
    c_num = Constant(regalloc.numcolors, lltype.Signed)
    for block in interesting_blocks:
        if block not in inside_blocks:
            i = 0
            while not is_interesting_op(block.operations[i]):
                i += 1
            block.operations.insert(i,
                SpaceOperation('gc_enter_roots_frame',
                               [c_gcdata, c_num],
                               varoftype(lltype.Void)))

    # If a link goes from a "non-inside, non-interesting block"
    # straight to an "inside_block", insert a gc_enter_roots_frame
    # along the link.  Similarly, if a block is a "inside-or-
    # interesting_block" and exits with a link going to a
    # "non-inside_block", then insert a gc_leave_roots_frame along the
    # link.
    cache1 = {}
    cache2 = {}
    for block in list(graph.iterblocks()):
        if block not in inside_or_interesting_blocks:
            for link in block.exits:
                if link.target in inside_blocks:
                    insert_along_link(link, 'gc_enter_roots_frame',
                                      [c_gcdata, c_num], cache1)
        else:
            for link in block.exits:
                if link.target not in inside_blocks:
                    insert_along_link(link, 'gc_leave_roots_frame',
                                      [], cache2)

    # check all blocks not in "inside_block": they might contain a
    # gc_save_root() that writes the bitmask meaning "everything is
    # free".  Look only before gc_enter_roots_frame, if there is one
    # in that block.  Remove these out-of-frame gc_save_root().
    for block in graph.iterblocks():
        if block not in inside_blocks:
            newops = []
            for i, op in enumerate(block.operations):
                if op.opname == 'gc_enter_roots_frame':
                    newops.extend(block.operations[i:])
                    break
                if op.opname == 'gc_save_root' and not is_interesting_op(op):
                    pass   # don't add in newops
                else:
                    newops.append(op)
            if len(newops) < len(block.operations):
                block.operations = newops

    join_blocks(graph)   # for the extra new blocks made in this function
def partial_escape(translator, graph):
    """
    Main function.  Blocks, which we'll work on, are in a dequeue,
    called "worklist", and are indexing link-state tuples in "statemap".

    Performs a partial escape analysis: mallocs whose object does not
    escape are virtualized, and getfields on virtual objects are
    replaced by the stored value.  Returns the number of getfields
    removed.
    """
    insert_links(graph)
    worklist = deque([graph.startblock])
    # block -> list of (incoming link, virtual-object state) pairs
    statemap = defaultdict(list)
    statemap[graph.startblock] = [(None, {})]
    finished = set()
    entrymap = mkentrymap(graph)
    backedges = find_backedges(graph)

    number_getfield_removed = 0

    while worklist:
        block = worklist.popleft()
        # loop headers and final blocks force materialization of all
        # virtuals (loops aren't supported properly yet -- see below)
        must_be_materialized = block.is_final_block()
        for link in entrymap[block]:
            if link in backedges:
                must_be_materialized = True
        state = get_current_state(statemap[block],
                                  must_be_materialized=must_be_materialized)
        if block.is_final_block():
            continue

        new_operations = []
        # Going through the operations
        for op in block.operations:
            if op.opname == 'malloc':
                # Create new entry for every allocation that is not returned
                if can_remove(op):
                    vobj = VirtualObject(op.result.concretetype, op.args)
                    state[op.result] = vobj
                    vobj.aliases.add(op.result)
                else:
                    new_operations.append(op)
            elif op.opname == 'cast_pointer':
                if op.args[0] in state:
                    # Creating something like an 'alias' for the casting
                    state[op.result] = vobj = state[op.args[0]]
                    vobj.aliases.add(op.result)
                else:
                    new_operations.append(op)
            elif op.opname == 'setfield':
                if op.args[0] in state:
                    # store into a virtual: record the value, drop the op
                    state[op.args[0]].vars[op.args[1].value,
                                           op.args[0].concretetype] = op.args[2]
                else:
                    # stored value escapes into a real object
                    materialize_object(op.args[2], state, new_operations)
                    new_operations.append(op)
            elif op.opname == 'getfield':
                key = op.args[1].value, op.args[0].concretetype
                if op.args[0] in state and key in state[op.args[0]].vars:
                    # read from a virtual: forward the recorded value
                    targ = state[op.args[0]].vars[key]
                    number_getfield_removed += 1
                    if targ in state:
                        state[op.result] = vobj = state[targ]
                        state[targ].aliases.add(vobj)
                    else:
                        new_operations.append(SpaceOperation('same_as', [targ],
                                                             op.result))
                else:
                    materialize_object(op.args[0], state, new_operations)
                    new_operations.append(op)
            else:
                # unknown operation: all virtual arguments escape
                for arg in op.args:
                    materialize_object(arg, state, new_operations)
                new_operations.append(op)

        # for all backedges, materialize all arguments (loops aren't
        # supported properly yet)
        for exit in block.exits:
            if exit in backedges or exit.target.is_final_block():
                for arg in exit.args:
                    materialize_object(arg, state, new_operations)

        block.operations = new_operations

        # We're done with the internals of the block. Editing the lists:
        finished.add(block)
        for exit in block.exits:
            # Only adding to the worklist if all its ancestors are processed
            for lnk in entrymap[exit.target]:
                if lnk.prevblock not in finished and lnk not in backedges:
                    break
            else:
                if exit.target not in finished and exit.target not in worklist:  # XXX
                    worklist.append(exit.target)
            # setting statemaps:
            statemap[exit.target].append((exit, state))

    if number_getfield_removed:
        if translator.config.translation.verbose:
            log.cse("partial escape analysis removed %s getfields in graph %s"
                    % (number_getfield_removed, graph))
        else:
            log.dot()

    # Done. Cleaning up.
    remove_same_as(graph)
    transform_dead_op_vars(graph)
    eliminate_empty_blocks(graph)
    join_blocks(graph)
    checkgraph(graph)

    return number_getfield_removed