def insert_ref_count_opcodes(ir: FuncIR) -> None:
    """Insert reference count inc/dec opcodes to a function.

    This is the entry point to this module.
    """
    cfg = get_cfg(ir.blocks)
    all_vals = all_values(ir.arg_regs, ir.blocks)

    borrowed_vals = {v for v in all_vals if v.is_borrowed}
    arg_set = set(ir.arg_regs)  # type: Set[Value]

    # Run the dataflow analyses that drive the transformation.
    liveness = analyze_live_regs(ir.blocks, cfg)
    borrows = analyze_borrowed_arguments(ir.blocks, cfg, borrowed_vals)
    must_defined = analyze_must_defined_regs(ir.blocks, cfg, arg_set, all_vals)
    ordering = make_value_ordering(ir)

    block_cache = {}  # type: BlockCache
    # Iterate over a copy since helper-block insertion mutates ir.blocks.
    for blk in ir.blocks[:]:
        terminator = blk.ops[-1]
        if isinstance(terminator, (Branch, Goto)):
            insert_branch_inc_and_decrefs(
                blk,
                block_cache,
                ir.blocks,
                liveness.before,
                borrows.before,
                borrows.after,
                must_defined.after,
                ordering)
        transform_block(blk, liveness.before, liveness.after,
                        borrows.before, must_defined.after)

    cleanup_cfg(ir.blocks)
def insert_ref_count_opcodes(ir: FuncIR) -> None:
    """Insert reference count inc/dec opcodes to a function.

    This is the entry point to this module.

    Mutates ``ir`` in place: inserts IncRef/DecRef ops into blocks,
    records registers needing initialization in ``ir.env``, and cleans
    up the CFG afterwards.
    """
    cfg = get_cfg(ir.blocks)
    # Idiom fix: set comprehensions instead of set(generator) (C401).
    borrowed = {reg for reg in ir.env.regs() if reg.is_borrowed}
    # Argument registers occupy the first len(ir.args) environment slots.
    num_args = len(ir.args)
    args = {reg for reg in ir.env.regs() if ir.env.indexes[reg] < num_args}
    regs = [reg for reg in ir.env.regs() if isinstance(reg, Register)]

    # Dataflow analyses that drive where inc/dec refs go.
    live = analyze_live_regs(ir.blocks, cfg)
    borrow = analyze_borrowed_arguments(ir.blocks, cfg, borrowed)
    defined = analyze_must_defined_regs(ir.blocks, cfg, args, regs)

    cache = {}  # type: BlockCache
    # Iterate over a copy since inserting branch helper blocks mutates ir.blocks.
    for block in ir.blocks[:]:
        if isinstance(block.ops[-1], (Branch, Goto)):
            insert_branch_inc_and_decrefs(block,
                                          cache,
                                          ir.blocks,
                                          live.before,
                                          borrow.before,
                                          borrow.after,
                                          defined.after,
                                          ir.env)
        transform_block(block, live.before, live.after,
                        borrow.before, defined.after, ir.env)

    # Find all the xdecs we inserted and note the registers down as
    # needing to be initialized.
    for block in ir.blocks:
        for op in block.ops:
            if isinstance(op, DecRef) and op.is_xdec:
                ir.env.vars_needing_init.add(op.src)

    cleanup_cfg(ir.blocks)
def run_case(self, testcase: DataDrivenTestCase) -> None:
    """Perform a data-flow analysis test case.

    Builds IR for the test case's input, runs the analysis selected by
    the test case's name suffix (e.g. ``_Liveness``), formats the
    before/after sets for every (block, op-index) program point, and
    compares against the expected output.
    """
    with use_custom_builtins(
            os.path.join(self.data_prefix, ICODE_GEN_BUILTINS), testcase):
        testcase.output = replace_native_int(testcase.output)
        try:
            ir = build_ir_for_single_file(testcase.input)
        except CompileError as e:
            # Compile errors become the expected output of the case.
            actual = e.messages
        else:
            actual = []
            for fn in ir:
                # Skip the synthesized top-level function unless the test
                # explicitly asks for it via a '_toplevel' name suffix.
                if (fn.name == TOP_LEVEL_NAME
                        and not testcase.name.endswith('_toplevel')):
                    continue
                exceptions.insert_exception_handling(fn)
                actual.extend(format_func(fn))
                cfg = dataflow.get_cfg(fn.blocks)
                args = set(fn.arg_regs)  # type: Set[Value]

                # Dispatch on the test-case name suffix to pick which
                # dataflow analysis to exercise.
                name = testcase.name
                if name.endswith('_MaybeDefined'):
                    # Forward, maybe
                    analysis_result = dataflow.analyze_maybe_defined_regs(
                        fn.blocks, cfg, args)
                elif name.endswith('_Liveness'):
                    # Backward, maybe
                    analysis_result = dataflow.analyze_live_regs(
                        fn.blocks, cfg)
                elif name.endswith('_MustDefined'):
                    # Forward, must
                    analysis_result = dataflow.analyze_must_defined_regs(
                        fn.blocks, cfg, args,
                        regs=all_values(fn.arg_regs, fn.blocks))
                elif name.endswith('_BorrowedArgument'):
                    # Forward, must
                    analysis_result = dataflow.analyze_borrowed_arguments(
                        fn.blocks, cfg, args)
                else:
                    assert False, 'No recognized _AnalysisName suffix in test case'

                names = generate_names_for_ir(fn.arg_regs, fn.blocks)

                # Emit one line per program point, ordered by
                # (block label, op index) for deterministic output.
                for key in sorted(analysis_result.before.keys(),
                                  key=lambda x: (x[0].label, x[1])):
                    pre = ', '.join(
                        sorted(names[reg]
                               for reg in analysis_result.before[key]))
                    post = ', '.join(
                        sorted(names[reg]
                               for reg in analysis_result.after[key]))
                    actual.append('%-8s %-23s %s' % ((key[0].label, key[1]),
                                                     '{%s}' % pre,
                                                     '{%s}' % post))
        assert_test_output(testcase, actual, 'Invalid source code output')
def insert_uninit_checks(ir: FuncIR) -> None:
    """Split blocks to add checks for possibly-uninitialized registers."""
    # Remove dead blocks from the CFG, which helps avoid spurious
    # checks due to unused error handling blocks.
    cleanup_cfg(ir.blocks)

    cfg = get_cfg(ir.blocks)
    arg_set = set(ir.arg_regs)
    values = all_values(ir.arg_regs, ir.blocks)
    must_defined = analyze_must_defined_regs(ir.blocks, cfg, arg_set, values)

    ir.blocks = split_blocks_at_uninits(ir.blocks, must_defined.before)
def insert_uninit_checks(ir: FuncIR) -> None:
    """Split blocks to add checks for possibly-uninitialized registers.

    Mutates ``ir.blocks`` in place.
    """
    # Remove dead blocks from the CFG, which helps avoid spurious
    # checks due to unused error handling blocks.
    cleanup_cfg(ir.blocks)

    cfg = get_cfg(ir.blocks)
    # Idiom fix: set comprehension instead of set(generator) (C401).
    # Argument registers occupy the first len(ir.args) environment slots.
    num_args = len(ir.args)
    args = {reg for reg in ir.env.regs() if ir.env.indexes[reg] < num_args}
    must_defined = analyze_must_defined_regs(ir.blocks, cfg, args,
                                             ir.env.regs())

    ir.blocks = split_blocks_at_uninits(ir.env, ir.blocks,
                                        must_defined.before)