def compile_modules_to_c(sources: List[BuildSource],
                         module_names: List[str],
                         options: Options,
                         alt_lib_path: Optional[str] = None) -> str:
    """Compile Python module(s) to C that can be used from Python C extension modules."""
    assert options.strict_optional, 'strict_optional must be turned on'

    result = build(sources=sources, options=options, alt_lib_path=alt_lib_path)
    if result.errors:
        raise CompileError(result.errors)

    # Generate basic IR, with missing exception and refcount handling.
    file_nodes = [result.files[name] for name in module_names]
    modules = genops.build_ir(file_nodes, result.types)

    # Run the lowering passes one at a time over every function:
    # exception handling first, then refcount handling.
    for transform in (insert_exception_handling, insert_ref_count_opcodes):
        for _name, mod_ir in modules:
            for fn in mod_ir.functions:
                transform(fn)

    # Generate C code.
    source_paths = {name: result.files[name].path for name in module_names}
    generator = ModuleGenerator(modules, source_paths)
    return generator.generate_c_for_modules()
def build_ir_for_single_file(
        input_lines: List[str],
        compiler_options: Optional[CompilerOptions] = None) -> List[FuncIR]:
    """Type check a one-file program and return the IR of its functions."""
    program_text = '\n'.join(input_lines)
    compiler_options = compiler_options or CompilerOptions()

    options = Options()
    options.show_traceback = True
    options.use_builtins_fixtures = True
    options.strict_optional = True
    options.python_version = (3, 6)
    options.export_types = True
    options.preserve_asts = True
    options.per_module_options['__main__'] = {'mypyc': True}

    # Construct input as a single file, then parse and type check it.
    source = build.BuildSource('main', '__main__', program_text)
    result = build.build(sources=[source],
                         options=options,
                         alt_lib_path=test_temp_dir)
    if result.errors:
        raise CompileError(result.errors)

    _, modules, errors = genops.build_ir(
        [result.files['__main__']], result.graph, result.types,
        compiler_options)
    assert errors == 0

    module = modules[0][1]
    return module.functions
def compile_modules_to_c(result: BuildResult,
                         module_names: List[str],
                         use_shared_lib: bool,
                         ops: Optional[List[str]] = None) -> str:
    """Compile Python module(s) to C that can be used from Python C extension modules."""
    # Generate basic IR, with missing exception and refcount handling.
    file_nodes = [result.files[name] for name in module_names]
    literals, modules = genops.build_ir(file_nodes, result.graph, result.types)

    def each_fn():
        # Yield every function of every module, in module order.
        for _name, mod_ir in modules:
            for fn in mod_ir.functions:
                yield fn

    # Insert exception handling.
    for fn in each_fn():
        insert_exception_handling(fn)

    # Insert refcount handling.
    for fn in each_fn():
        insert_ref_count_opcodes(fn)

    # Format ops for debugging.
    if ops is not None:
        for fn in each_fn():
            ops.extend(format_func(fn))
            ops.append('')

    # Generate C code.
    source_paths = {name: result.files[name].path for name in module_names}
    generator = ModuleGenerator(literals, modules, source_paths, use_shared_lib)
    return generator.generate_c_for_modules()
def compile_module_to_c(sources: List[BuildSource], module_name: str,
                        options: Options, alt_lib_path: str) -> str:
    """Compile a Python module to source for a Python C extension module."""
    assert options.strict_optional, 'strict_optional must be turned on'

    result = build(sources=sources, options=options, alt_lib_path=alt_lib_path)
    if result.errors:
        raise CompileError(result.errors)

    # Build IR for the requested module, then add refcount handling.
    module_ir = genops.build_ir(result.files[module_name], result.types)
    for func in module_ir.functions:
        insert_ref_count_opcodes(func)

    emitter = ModuleGenerator(module_name, module_ir)
    return emitter.generate_c_module()
def compile_scc_to_ir(
    scc: List[MypyFile],
    result: BuildResult,
    mapper: genops.Mapper,
    compiler_options: CompilerOptions,
    errors: Errors,
) -> ModuleIRs:
    """Compile an SCC into ModuleIRs.

    Any modules that this SCC depends on must have either been compiled or
    loaded from a cache into mapper.

    Arguments:
        scc: The list of MypyFiles to compile
        result: The BuildResult from the mypy front-end
        mapper: The Mapper object mapping mypy ASTs to class and func IRs
        compiler_options: The compilation options
        errors: Where to report any errors encountered

    Returns the IR of the modules.
    """
    if compiler_options.verbose:
        print("Compiling {}".format(", ".join(x.name for x in scc)))

    # Generate basic IR, with missing exception and refcount handling.
    modules = genops.build_ir(
        scc, result.graph, result.types, mapper, compiler_options, errors
    )
    if errors.num_errors > 0:
        return modules

    # Run the lowering passes one whole pass at a time:
    # uninit checks, then exception handling, then refcount handling.
    for transform in (insert_uninit_checks,
                      insert_exception_handling,
                      insert_ref_count_opcodes):
        for module in modules.values():
            for fn in module.functions:
                transform(fn)

    return modules
def build_ir_for_single_file(input_lines: List[str]) -> List[FuncIR]:
    """Type check a one-file program and return the IR of its functions."""
    program_text = '\n'.join(input_lines)

    options = Options()
    options.show_traceback = True
    options.use_builtins_fixtures = True
    options.strict_optional = True

    # Construct input as a single file, then parse and type check it.
    source = build.BuildSource('main', '__main__', program_text)
    result = build.build(sources=[source],
                         options=options,
                         alt_lib_path=test_temp_dir)
    if result.errors:
        raise CompileError(result.errors)

    module = genops.build_ir(result.files['__main__'], result.types)
    return module.functions
def compile_modules_to_c(
        result: BuildResult,
        module_names: List[str],
        shared_lib_name: Optional[str],
        compiler_options: CompilerOptions,
        errors: Errors,
        ops: Optional[List[str]] = None) -> List[Tuple[str, str]]:
    """Compile Python module(s) to C that can be used from Python C extension modules."""
    # Generate basic IR, with missing exception and refcount handling.
    file_nodes = [result.files[name] for name in module_names]
    literals, modules = genops.build_ir(file_nodes, result.graph, result.types,
                                        compiler_options, errors)
    if errors.num_errors > 0:
        return []

    # Run the lowering passes one whole pass at a time:
    # uninit checks, then exception handling, then refcount handling.
    for transform in (insert_uninit_checks,
                      insert_exception_handling,
                      insert_ref_count_opcodes):
        for _name, mod_ir in modules:
            for fn in mod_ir.functions:
                transform(fn)

    # Format ops for debugging.
    if ops is not None:
        for _name, mod_ir in modules:
            for fn in mod_ir.functions:
                ops.extend(format_func(fn))
                ops.append('')

    # Generate C code.
    source_paths = {name: result.files[name].path for name in module_names}
    generator = ModuleGenerator(literals, modules, source_paths,
                                shared_lib_name, compiler_options.multi_file)
    return generator.generate_c_for_modules()
def run_case(self, testcase: DataDrivenTestCase) -> None:
    """Perform a runtime checking transformation test case."""
    with use_custom_builtins(
            os.path.join(self.data_prefix, ICODE_GEN_BUILTINS), testcase):
        expected_output = remove_comment_lines(testcase.output)
        program_text = '\n'.join(testcase.input)

        options = Options()
        options.use_builtins_fixtures = True
        options.show_traceback = True
        options.strict_optional = True
        options.python_version = (3, 6)
        options.export_types = True

        source = build.BuildSource('main', '__main__', program_text)
        try:
            # Construct input as a single file, then parse and type check it.
            result = build.build(sources=[source],
                                 options=options,
                                 alt_lib_path=test_temp_dir)
        except CompileError as e:
            actual = e.messages
        else:
            if result.errors:
                actual = result.errors
            else:
                modules = genops.build_ir([result.files['__main__']],
                                          result.types)
                module = modules[0][1]
                actual = []
                for fn in module.functions:
                    if is_empty_module_top_level(fn):
                        # Skip trivial module top levels that only return.
                        continue
                    actual.extend(format_func(fn))

        assert_test_output(testcase, actual,
                           'Invalid source code output', expected_output)
def compile_modules_to_c(
    result: BuildResult,
    compiler_options: CompilerOptions,
    errors: Errors,
    groups: Groups,
) -> Tuple[ModuleIRs, List[FileContents]]:
    """Compile Python module(s) to the source of Python C extension modules.

    This generates the source code for the "shared library" module
    for each group. The shim modules are generated in mypyc.build.
    Each shared library module provides, for each module in its group,
    a PyCapsule containing an initialization function.
    Additionally, it provides a capsule containing an export table of
    pointers to all of the group's functions and static variables.

    Arguments:
        result: The BuildResult from the mypy front-end
        compiler_options: The compilation options
        errors: Where to report any errors encountered
        groups: The groups that we are compiling.
            See documentation of Groups type above.

    Returns the IR of the modules and a list containing the generated files
    for each group.
    """
    # (Fix: the docstring used to document an `ops` argument that is not
    # part of this function's signature; the stale entry has been removed.)
    module_names = [source.module
                    for group_sources, _ in groups
                    for source in group_sources]
    file_nodes = [result.files[name] for name in module_names]

    # Construct a map from modules to what group they belong to.
    group_map = {}
    for group, lib_name in groups:
        for source in group:
            group_map[source.module] = lib_name

    # Generate basic IR, with missing exception and refcount handling.
    mapper = genops.Mapper(group_map)
    modules = genops.build_ir(file_nodes, result.graph, result.types,
                              mapper, compiler_options, errors)
    if errors.num_errors > 0:
        return modules, []

    # Insert uninit checks.
    for module in modules.values():
        for fn in module.functions:
            insert_uninit_checks(fn)
    # Insert exception handling.
    for module in modules.values():
        for fn in module.functions:
            insert_exception_handling(fn)
    # Insert refcount handling.
    for module in modules.values():
        for fn in module.functions:
            insert_ref_count_opcodes(fn)

    source_paths = {module_name: result.files[module_name].path
                    for module_name in module_names}

    names = NameGenerator([[source.module for source in sources]
                           for sources, _ in groups])

    # Generate C code for each compilation group. Each group will be
    # compiled into a separate extension module.
    ctext = []
    for group_sources, group_name in groups:
        group_modules = [(source.module, modules[source.module])
                         for source in group_sources]
        literals = mapper.literals[group_name]
        generator = GroupGenerator(
            literals, group_modules, source_paths, group_name, group_map,
            names, compiler_options.multi_file
        )
        ctext.append(generator.generate_c_for_modules())

    return modules, ctext
def run_case(self, testcase: DataDrivenTestCase) -> None:
    """Perform a data-flow analysis test case."""
    with use_custom_builtins(
            os.path.join(self.data_prefix, ICODE_GEN_BUILTINS), testcase):
        program_text = '\n'.join(testcase.input)

        options = Options()
        options.use_builtins_fixtures = True
        options.show_traceback = True
        options.python_version = (3, 6)
        options.export_types = True

        source = build.BuildSource('main', '__main__', program_text)
        try:
            # Construct input as a single file, then parse and type check it.
            result = build.build(sources=[source],
                                 options=options,
                                 alt_lib_path=test_temp_dir)
        except CompileError as e:
            actual = e.messages
        else:
            if result.errors:
                actual = result.errors
            else:
                modules = genops.build_ir([result.files['__main__']],
                                          result.types)
                module = modules[0][1]
                # Fix: the check requires 2 functions, but the old message
                # claimed "Only 1 ... expected", contradicting it.
                assert len(module.functions) == 2, (
                    "Exactly 2 function definitions expected per test case")
                fn = module.functions[0]
                actual = format_func(fn)
                actual = actual[actual.index('L0:'):]
                cfg = analysis.get_cfg(fn.blocks)

                args = set(reg for reg, i in fn.env.indexes.items()
                           if i < len(fn.args))

                # The test name's suffix selects which analysis to run.
                name = testcase.name
                if name.endswith('_MaybeDefined'):
                    # Forward, maybe
                    analysis_result = analysis.analyze_maybe_defined_regs(
                        fn.blocks, cfg, args)
                elif name.endswith('_Liveness'):
                    # Backward, maybe
                    analysis_result = analysis.analyze_live_regs(
                        fn.blocks, cfg)
                elif name.endswith('_MustDefined'):
                    # Forward, must
                    analysis_result = analysis.analyze_must_defined_regs(
                        fn.blocks, cfg, args, regs=fn.env.regs())
                elif name.endswith('_BorrowedArgument'):
                    # Forward, must
                    analysis_result = analysis.analyze_borrowed_arguments(
                        fn.blocks, cfg, args)
                else:
                    assert False, \
                        'No recognized _AnalysisName suffix in test case'

                actual.append('')
                for key in sorted(analysis_result.before.keys(),
                                  key=lambda x: (x[0].label, x[1])):
                    pre = ', '.join(
                        sorted(reg.name
                               for reg in analysis_result.before[key]))
                    post = ', '.join(
                        sorted(reg.name
                               for reg in analysis_result.after[key]))
                    actual.append('%-8s %-23s %s' % ((key[0].label, key[1]),
                                                     '{%s}' % pre,
                                                     '{%s}' % post))

        assert_test_output(testcase, actual, 'Invalid source code output')
def run_case(self, testcase: DataDrivenTestCase) -> None:
    """Perform a data-flow analysis test case."""
    with use_custom_builtins(
            os.path.join(test_data_prefix, ICODE_GEN_BUILTINS), testcase):
        expected_output = testcase.output
        program_text = '\n'.join(testcase.input)

        options = Options()
        options.use_builtins_fixtures = True
        options.show_traceback = True

        source = build.BuildSource('main', '__main__', program_text)
        try:
            # Construct input as a single file, then parse and type check it.
            result = build.build(sources=[source],
                                 options=options,
                                 alt_lib_path=test_temp_dir)
        except CompileError as e:
            actual = e.messages
        else:
            if result.errors:
                actual = result.errors
            else:
                module = genops.build_ir(result.files['__main__'],
                                         result.types)
                assert len(module.functions) == 1, \
                    "Only 1 function definition expected per test case"
                fn = module.functions[0]
                actual = format_func(fn)
                actual = actual[actual.index('L0:'):]
                cfg = analysis.get_cfg(fn.blocks)

                args = set([Register(i) for i in range(len(fn.args))])

                # The test name's suffix selects which analysis to run.
                name = testcase.name
                if name.endswith('_MaybeDefined'):
                    # Forward, maybe
                    analysis_result = analysis.analyze_maybe_defined_regs(
                        fn.blocks, cfg, args)
                elif name.endswith('_Liveness'):
                    # Backward, maybe
                    analysis_result = analysis.analyze_live_regs(
                        fn.blocks, cfg)
                elif name.endswith('_MustDefined'):
                    # Forward, must
                    analysis_result = analysis.analyze_must_defined_regs(
                        fn.blocks, cfg, args, num_regs=fn.env.num_regs())
                elif name.endswith('_BorrowedArgument'):
                    # Forward, must
                    analysis_result = analysis.analyze_borrowed_arguments(
                        fn.blocks, cfg, args)
                else:
                    assert False, \
                        'No recognized _AnalysisName suffix in test case'

                actual.append('')
                for key in sorted(analysis_result.before.keys()):
                    pre = ', '.join(fn.env.names[reg]
                                    for reg in analysis_result.before[key])
                    post = ', '.join(fn.env.names[reg]
                                     for reg in analysis_result.after[key])
                    actual.append('%-8s %-23s %s' %
                                  (key, '{%s}' % pre, '{%s}' % post))

        assert_string_arrays_equal_wildcards(
            expected_output, actual,
            'Invalid source code output ({}, line {})'.format(
                testcase.file, testcase.line))