def insert_ref_count_opcodes(ir: FuncIR) -> None:
    """Insert reference count inc/dec opcodes to a function.

    This is the entry point to this module.
    """
    cfg = get_cfg(ir.blocks)
    values = all_values(ir.arg_regs, ir.blocks)

    borrowed = {value for value in values if value.is_borrowed}
    args = set(ir.arg_regs)  # type: Set[Value]
    live = analyze_live_regs(ir.blocks, cfg)
    borrow = analyze_borrowed_arguments(ir.blocks, cfg, borrowed)
    defined = analyze_must_defined_regs(ir.blocks, cfg, args, values)
    ordering = make_value_ordering(ir)
    cache = {}  # type: BlockCache
    for block in ir.blocks[:]:
        if isinstance(block.ops[-1], (Branch, Goto)):
            insert_branch_inc_and_decrefs(block,
                                          cache,
                                          ir.blocks,
                                          live.before,
                                          borrow.before,
                                          borrow.after,
                                          defined.after,
                                          ordering)
        transform_block(block, live.before, live.after, borrow.before, defined.after)

    cleanup_cfg(ir.blocks)
def insert_ref_count_opcodes(ir: FuncIR) -> None:
    """Insert reference count inc/dec opcodes to a function.

    This is the entry point to this module.
    """
    cfg = get_cfg(ir.blocks)
    borrowed = set(reg for reg in ir.env.regs() if reg.is_borrowed)
    args = set(reg for reg in ir.env.regs() if ir.env.indexes[reg] < len(ir.args))
    regs = [reg for reg in ir.env.regs() if isinstance(reg, Register)]
    live = analyze_live_regs(ir.blocks, cfg)
    borrow = analyze_borrowed_arguments(ir.blocks, cfg, borrowed)
    defined = analyze_must_defined_regs(ir.blocks, cfg, args, regs)
    cache = {}  # type: BlockCache
    for block in ir.blocks[:]:
        if isinstance(block.ops[-1], (Branch, Goto)):
            insert_branch_inc_and_decrefs(block,
                                          cache,
                                          ir.blocks,
                                          live.before,
                                          borrow.before,
                                          borrow.after,
                                          defined.after,
                                          ir.env)
        transform_block(block, live.before, live.after, borrow.before, defined.after, ir.env)

    # Find all the xdecs we inserted and note the registers down as
    # needing to be initialized.
    for block in ir.blocks:
        for op in block.ops:
            if isinstance(op, DecRef) and op.is_xdec:
                ir.env.vars_needing_init.add(op.src)

    cleanup_cfg(ir.blocks)
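# A small illustrative function (hypothetical, not taken from the mypyc test
# suite) of the situation the xdec bookkeeping above deals with: 'y' is
# assigned on only one path, so at points where it may or may not be defined,
# releasing it requires a decref that tolerates an undefined value (an
# "xdec"), and the register backing 'y' must be pre-initialized to the
# "undefined" marker -- which is what vars_needing_init records.
def example_maybe_defined(flag: bool) -> int:
    if flag:
        y = [1, 2, 3]   # 'y' is defined only when flag is true
    if flag:
        return len(y)
    return 0            # 'y' may be undefined on this path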
def analyze_always_defined_attrs_in_class(cl: ClassIR, seen: Set[ClassIR]) -> None:
    if cl in seen:
        return

    seen.add(cl)

    if (cl.is_trait
            or cl.inherits_python
            or cl.allow_interpreted_subclasses
            or cl.builtin_base is not None
            or cl.children is None
            or cl.is_serializable()):
        # Give up -- we can't enforce that attributes are always defined.
        return

    # First analyze all base classes. Track seen classes to avoid duplicate work.
    for base in cl.mro[1:]:
        analyze_always_defined_attrs_in_class(base, seen)

    m = cl.get_method('__init__')
    if m is None:
        cl._always_initialized_attrs = cl.attrs_with_defaults.copy()
        cl._sometimes_initialized_attrs = cl.attrs_with_defaults.copy()
        return

    self_reg = m.arg_regs[0]
    cfg = get_cfg(m.blocks)
    dirty = analyze_self_leaks(m.blocks, self_reg, cfg)
    maybe_defined = analyze_maybe_defined_attrs_in_init(
        m.blocks, self_reg, cl.attrs_with_defaults, cfg)

    all_attrs: Set[str] = set()
    for base in cl.mro:
        all_attrs.update(base.attributes)

    maybe_undefined = analyze_maybe_undefined_attrs_in_init(
        m.blocks,
        self_reg,
        initial_undefined=all_attrs - cl.attrs_with_defaults,
        cfg=cfg)

    always_defined = find_always_defined_attributes(
        m.blocks, self_reg, all_attrs, maybe_defined, maybe_undefined, dirty)
    always_defined = {a for a in always_defined if not cl.is_deletable(a)}

    cl._always_initialized_attrs = always_defined
    if dump_always_defined:
        print(cl.name, sorted(always_defined))
    cl._sometimes_initialized_attrs = find_sometimes_defined_attributes(
        m.blocks, self_reg, maybe_defined, dirty)

    mark_attr_initialization_ops(m.blocks, self_reg, maybe_defined, dirty)

    # Check if __init__ can run unpredictable code (leak 'self').
    any_dirty = False
    for b in m.blocks:
        for i, op in enumerate(b.ops):
            if dirty.after[b, i] and not isinstance(op, Return):
                any_dirty = True
                break
    cl.init_self_leak = any_dirty
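# A small illustrative class (hypothetical, not from the mypyc tests) showing
# the distinction the analysis above computes: 'x' is assigned on every path
# through __init__ before 'self' can escape, so it would end up among the
# always-initialized attributes, while 'y' is assigned only conditionally and
# at best counts as sometimes initialized.
class AttrExample:
    def __init__(self, flag: bool) -> None:
        self.x = 0
        if flag:
            self.y = 1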
def run_case(self, testcase: DataDrivenTestCase) -> None:
    """Perform a data-flow analysis test case."""
    with use_custom_builtins(
            os.path.join(self.data_prefix, ICODE_GEN_BUILTINS), testcase):
        testcase.output = replace_native_int(testcase.output)
        try:
            ir = build_ir_for_single_file(testcase.input)
        except CompileError as e:
            actual = e.messages
        else:
            actual = []
            for fn in ir:
                if (fn.name == TOP_LEVEL_NAME
                        and not testcase.name.endswith('_toplevel')):
                    continue
                exceptions.insert_exception_handling(fn)
                actual.extend(format_func(fn))

                cfg = dataflow.get_cfg(fn.blocks)
                args = set(fn.arg_regs)  # type: Set[Value]
                name = testcase.name
                if name.endswith('_MaybeDefined'):
                    # Forward, maybe
                    analysis_result = dataflow.analyze_maybe_defined_regs(
                        fn.blocks, cfg, args)
                elif name.endswith('_Liveness'):
                    # Backward, maybe
                    analysis_result = dataflow.analyze_live_regs(
                        fn.blocks, cfg)
                elif name.endswith('_MustDefined'):
                    # Forward, must
                    analysis_result = dataflow.analyze_must_defined_regs(
                        fn.blocks, cfg, args,
                        regs=all_values(fn.arg_regs, fn.blocks))
                elif name.endswith('_BorrowedArgument'):
                    # Forward, must
                    analysis_result = dataflow.analyze_borrowed_arguments(
                        fn.blocks, cfg, args)
                else:
                    assert False, 'No recognized _AnalysisName suffix in test case'

                names = generate_names_for_ir(fn.arg_regs, fn.blocks)

                for key in sorted(analysis_result.before.keys(),
                                  key=lambda x: (x[0].label, x[1])):
                    pre = ', '.join(
                        sorted(names[reg] for reg in analysis_result.before[key]))
                    post = ', '.join(
                        sorted(names[reg] for reg in analysis_result.after[key]))
                    actual.append('%-8s %-23s %s' % ((key[0].label, key[1]),
                                                     '{%s}' % pre,
                                                     '{%s}' % post))

        assert_test_output(testcase, actual, 'Invalid source code output')
def insert_uninit_checks(ir: FuncIR) -> None:
    # Remove dead blocks from the CFG, which helps avoid spurious
    # checks due to unused error handling blocks.
    cleanup_cfg(ir.blocks)

    cfg = get_cfg(ir.blocks)
    must_defined = analyze_must_defined_regs(
        ir.blocks,
        cfg,
        set(ir.arg_regs),
        all_values(ir.arg_regs, ir.blocks))

    ir.blocks = split_blocks_at_uninits(ir.blocks, must_defined.before)
def insert_uninit_checks(ir: FuncIR) -> None:
    # Remove dead blocks from the CFG, which helps avoid spurious
    # checks due to unused error handling blocks.
    cleanup_cfg(ir.blocks)

    cfg = get_cfg(ir.blocks)
    args = set(reg for reg in ir.env.regs()
               if ir.env.indexes[reg] < len(ir.args))
    must_defined = analyze_must_defined_regs(ir.blocks, cfg, args, ir.env.regs())

    ir.blocks = split_blocks_at_uninits(ir.env, ir.blocks, must_defined.before)
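# A minimal sketch (the wrapper name is hypothetical; this is not the actual
# mypyc compile driver) of one plausible ordering of the passes shown above:
# uninitialized-variable checks and exception handling are inserted before
# reference counting, so the refcount pass sees the error-handling blocks it
# works against.
def apply_lowering_passes(fn: FuncIR) -> None:
    insert_uninit_checks(fn)
    exceptions.insert_exception_handling(fn)
    insert_ref_count_opcodes(fn)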