def __init__(self, typingctx, targetctx, library, args, return_type,
             flags, locals):
    """Initialise compiler-pipeline state.

    NOTE(review): this appears to be a stray module-level duplicate of
    ``BasePipeline.__init__`` defined later in the file — confirm before
    relying on it.
    """
    # Refresh configuration and both contexts before compiling anything.
    config.reload_config()
    typingctx.refresh()
    targetctx.refresh()

    self.typingctx = typingctx
    self.targetctx = _make_subtarget(targetctx, flags)
    self.library = library
    self.args = args
    self.return_type = return_type
    self.flags = flags
    self.locals = locals

    # Intermediate results, filled in by the pipeline stages.
    self.bc = None
    self.func_id = None
    self.func_ir = None
    self.func_ir_original = None   # preserved IR copy used for fallback
    self.lifted = None
    self.lifted_from = None
    self.typemap = None
    self.calltypes = None
    self.type_annotation = None

    # Arbitrary inter-pipeline-stage metadata; parfor diagnostics are
    # stashed here so later stages (and callers) can reach them.
    self.parfor_diagnostics = ParforDiagnostics()
    self.metadata = {'parfor_diagnostics': self.parfor_diagnostics}

    self.status = _CompileStatus(can_fallback=self.flags.enable_pyobject,
                                 can_giveup=config.COMPATIBILITY_MODE)
class BasePipeline(object):
    """
    Stores and manages states for the compiler pipeline.

    A pipeline is a sequence of named stages registered on a
    ``_PipelineManager`` (see ``define_pipelines``); each ``stage_*``
    method reads and writes intermediate results held on this object
    (bytecode, Numba IR, type maps) and ultimately produces a compile
    result in ``self.cr``.
    """

    def __init__(self, typingctx, targetctx, library, args, return_type,
                 flags, locals):
        # Make sure the environment is reloaded
        config.reload_config()
        typingctx.refresh()
        targetctx.refresh()

        self.typingctx = typingctx
        self.targetctx = _make_subtarget(targetctx, flags)
        self.library = library
        self.args = args
        self.return_type = return_type
        self.flags = flags
        self.locals = locals

        # Results of various steps of the compilation pipeline
        self.bc = None
        self.func_id = None
        self.func_ir = None
        self.func_ir_original = None  # used for fallback
        self.lifted = None
        self.lifted_from = None
        self.typemap = None
        self.calltypes = None
        self.type_annotation = None
        self.metadata = {}  # holds arbitrary inter-pipeline stage meta data

        # parfor diagnostics info, add to metadata
        self.parfor_diagnostics = ParforDiagnostics()
        self.metadata['parfor_diagnostics'] = self.parfor_diagnostics

        self.status = _CompileStatus(
            can_fallback=self.flags.enable_pyobject,
            can_giveup=config.COMPATIBILITY_MODE
        )

    @contextmanager
    def fallback_context(self, msg):
        """
        Wraps code that would signal a fallback to object mode.

        If fallback is allowed, the exception is turned into a
        NumbaWarning (prefixed with *msg*) and re-raised so the
        pipeline manager can switch pipelines.
        """
        try:
            yield
        except BaseException as e:
            if not self.status.can_fallback:
                raise
            else:
                if utils.PYVERSION >= (3,):
                    # Clear all references attached to the traceback
                    e = e.with_traceback(None)
                warnings.warn_explicit('%s: %s' % (msg, e),
                                       errors.NumbaWarning,
                                       self.func_id.filename,
                                       self.func_id.firstlineno)
                raise

    @contextmanager
    def giveup_context(self, msg):
        """
        Wraps code that would signal a fallback to interpreter mode.

        Same shape as ``fallback_context`` but gated on
        ``status.can_giveup`` instead of ``status.can_fallback``.
        """
        try:
            yield
        except BaseException as e:
            if not self.status.can_giveup:
                raise
            else:
                if utils.PYVERSION >= (3,):
                    # Clear all references attached to the traceback
                    e = e.with_traceback(None)
                warnings.warn_explicit('%s: %s' % (msg, e),
                                       errors.NumbaWarning,
                                       self.func_id.filename,
                                       self.func_id.firstlineno)
                raise

    def extract_bytecode(self, func_id):
        """
        Extract bytecode from function
        """
        bc = bytecode.ByteCode(func_id)
        if config.DUMP_BYTECODE:
            print(bc.dump())
        return bc

    def compile_extra(self, func):
        """Compile a Python function object; entry point for bytecode input.

        Falls back to interpreter mode (when allowed) if bytecode
        extraction fails.
        """
        self.func_id = bytecode.FunctionIdentity.from_function(func)
        try:
            bc = self.extract_bytecode(self.func_id)
        except BaseException as e:
            if self.status.can_giveup:
                self.stage_compile_interp_mode()
                return self.cr
            else:
                raise e
        self.bc = bc
        self.lifted = ()
        self.lifted_from = None
        return self._compile_bytecode()

    def compile_ir(self, func_ir, lifted=(), lifted_from=None):
        """Compile an existing Numba IR; entry point for IR input
        (e.g. lifted loops / with-blocks)."""
        self.func_id = func_ir.func_id
        self.lifted = lifted
        self.lifted_from = lifted_from
        self._set_and_check_ir(func_ir)
        return self._compile_ir()

    def stage_analyze_bytecode(self):
        """
        Analyze bytecode and translating to Numba IR
        """
        func_ir = translate_stage(self.func_id, self.bc)
        self._set_and_check_ir(func_ir)

    def _set_and_check_ir(self, func_ir):
        # Install the IR and validate the argument-type count against
        # the function's actual arity.
        self.func_ir = func_ir
        self.nargs = self.func_ir.arg_count
        if not self.args and self.flags.force_pyobject:
            # Allow an empty argument types specification when object mode
            # is explicitly requested.
            self.args = (types.pyobject,) * self.nargs
        elif len(self.args) != self.nargs:
            raise TypeError("Signature mismatch: %d argument types given, "
                            "but function takes %d arguments"
                            % (len(self.args), self.nargs))

    def stage_process_ir(self):
        """Run generic IR post-processing on the freshly translated IR."""
        ir_processing_stage(self.func_ir)

    def stage_preserve_ir(self):
        """Keep a copy of the IR so object-mode fallback can restart
        from an unmodified version."""
        self.func_ir_original = self.func_ir.copy()

    def frontend_looplift(self):
        """
        Loop lifting analysis and transformation
        """
        loop_flags = self.flags.copy()
        outer_flags = self.flags.copy()
        # Do not recursively loop lift
        outer_flags.unset('enable_looplift')
        loop_flags.unset('enable_looplift')
        if not self.flags.enable_pyobject_looplift:
            loop_flags.unset('enable_pyobject')
        main, loops = transforms.loop_lifting(self.func_ir,
                                              typingctx=self.typingctx,
                                              targetctx=self.targetctx,
                                              locals=self.locals,
                                              flags=loop_flags)
        if loops:
            # Some loops were extracted
            if config.DEBUG_FRONTEND or config.DEBUG:
                for loop in loops:
                    print("Lifting loop", loop.get_source_location())
            cres = compile_ir(self.typingctx, self.targetctx, main,
                              self.args, self.return_type,
                              outer_flags, self.locals,
                              lifted=tuple(loops), lifted_from=None)
            return cres

    def stage_frontend_withlift(self):
        """
        Extract with-contexts
        """
        main, withs = transforms.with_lifting(
            func_ir=self.func_ir,
            typingctx=self.typingctx,
            targetctx=self.targetctx,
            flags=self.flags,
            locals=self.locals,
        )
        if withs:
            cres = compile_ir(self.typingctx, self.targetctx, main,
                              self.args, self.return_type,
                              self.flags, self.locals,
                              lifted=tuple(withs), lifted_from=None,
                              pipeline_class=type(self))
            # Terminate the current pipeline early with this result.
            raise _EarlyPipelineCompletion(cres)

    def stage_objectmode_frontend(self):
        """
        Front-end: Analyze bytecode, generate Numba IR, infer types
        """
        # Restore the preserved IR if a fallback copy exists.
        self.func_ir = self.func_ir_original or self.func_ir
        if self.flags.enable_looplift:
            assert not self.lifted
            cres = self.frontend_looplift()
            if cres is not None:
                raise _EarlyPipelineCompletion(cres)

        # Fallback typing: everything is a python object
        self.typemap = defaultdict(lambda: types.pyobject)
        self.calltypes = defaultdict(lambda: types.pyobject)
        self.return_type = types.pyobject

    def stage_nopython_frontend(self):
        """
        Type inference and legalization
        """
        with self.fallback_context('Function "%s" failed type inference'
                                   % (self.func_id.func_name,)):
            # Type inference
            typemap, return_type, calltypes = type_inference_stage(
                self.typingctx,
                self.func_ir,
                self.args,
                self.return_type,
                self.locals)
            self.typemap = typemap
            self.return_type = return_type
            self.calltypes = calltypes

        with self.fallback_context('Function "%s" has invalid return type'
                                   % (self.func_id.func_name,)):
            legalize_return_type(self.return_type, self.func_ir,
                                 self.targetctx)

    def stage_generic_rewrites(self):
        """
        Perform any intermediate representation rewrites before type
        inference.
        """
        assert self.func_ir
        msg = ('Internal error in pre-inference rewriting '
               'pass encountered during compilation of '
               'function "%s"' % (self.func_id.func_name,))
        with self.fallback_context(msg):
            rewrites.rewrite_registry.apply('before-inference',
                                            self, self.func_ir)

    def stage_nopython_rewrites(self):
        """
        Perform any intermediate representation rewrites after type
        inference.
        """
        # Ensure we have an IR and type information.
        assert self.func_ir
        assert isinstance(getattr(self, 'typemap', None), dict)
        assert isinstance(getattr(self, 'calltypes', None), dict)
        msg = ('Internal error in post-inference rewriting '
               'pass encountered during compilation of '
               'function "%s"' % (self.func_id.func_name,))
        with self.fallback_context(msg):
            rewrites.rewrite_registry.apply('after-inference',
                                            self, self.func_ir)

    def stage_pre_parfor_pass(self):
        """
        Preprocessing for data-parallel computations.
        """
        # Ensure we have an IR and type information.
        assert self.func_ir
        preparfor_pass = PreParforPass(
            self.func_ir,
            self.type_annotation.typemap,
            self.type_annotation.calltypes, self.typingctx,
            self.flags.auto_parallel,
            self.parfor_diagnostics.replaced_fns
        )
        preparfor_pass.run()

    def stage_parfor_pass(self):
        """
        Convert data-parallel computations into Parfor nodes
        """
        # Ensure we have an IR and type information.
        assert self.func_ir
        parfor_pass = ParforPass(self.func_ir, self.type_annotation.typemap,
                                 self.type_annotation.calltypes,
                                 self.return_type, self.typingctx,
                                 self.flags.auto_parallel, self.flags,
                                 self.parfor_diagnostics)
        parfor_pass.run()

        if config.WARNINGS:
            # check the parfor pass worked and warn if it didn't
            has_parfor = False
            for blk in self.func_ir.blocks.values():
                for stmnt in blk.body:
                    if isinstance(stmnt, Parfor):
                        has_parfor = True
                        break
                else:
                    continue
                break

            if not has_parfor:
                # parfor calls the compiler chain again with a string
                if not self.func_ir.loc.filename == '<string>':
                    msg = ("parallel=True was specified but no transformation"
                           " for parallel execution was possible.")
                    warnings.warn_explicit(
                        msg,
                        errors.NumbaWarning,
                        self.func_id.filename,
                        self.func_id.firstlineno
                    )

    def stage_inline_pass(self):
        """
        Inline calls to locally defined closures.
        """
        # Ensure we have an IR and type information.
        assert self.func_ir
        inline_pass = InlineClosureCallPass(self.func_ir,
                                            self.flags.auto_parallel,
                                            self.parfor_diagnostics.replaced_fns)
        inline_pass.run()
        # Remove all Dels, and re-run postproc
        post_proc = postproc.PostProcessor(self.func_ir)
        post_proc.run()

        if config.DEBUG or config.DUMP_IR:
            name = self.func_ir.func_id.func_qualname
            print(("IR DUMP: %s" % name).center(80, "-"))
            self.func_ir.dump()

    def stage_annotate_type(self):
        """
        Create type annotation after type inference
        """
        self.type_annotation = type_annotations.TypeAnnotation(
            func_ir=self.func_ir,
            typemap=self.typemap,
            calltypes=self.calltypes,
            lifted=self.lifted,
            lifted_from=self.lifted_from,
            args=self.args,
            return_type=self.return_type,
            html_output=config.HTML)

        if config.ANNOTATE:
            print("ANNOTATION".center(80, '-'))
            print(self.type_annotation)
            print('=' * 80)
        if config.HTML:
            with open(config.HTML, 'w') as fout:
                self.type_annotation.html_annotate(fout)

    def stage_dump_diagnostics(self):
        """Dump parfor diagnostics when parallel mode and the diagnostics
        config option are both enabled."""
        if self.flags.auto_parallel.enabled:
            if config.PARALLEL_DIAGNOSTICS:
                if self.parfor_diagnostics is not None:
                    self.parfor_diagnostics.dump(config.PARALLEL_DIAGNOSTICS)
                else:
                    raise RuntimeError("Diagnostics failed.")

    def backend_object_mode(self):
        """
        Object mode compilation
        """
        with self.giveup_context("Function %s failed at object mode lowering"
                                 % (self.func_id.func_name,)):
            if len(self.args) != self.nargs:
                # append missing
                self.args = (tuple(self.args) + (types.pyobject,) *
                             (self.nargs - len(self.args)))
            return py_lowering_stage(self.targetctx,
                                     self.library,
                                     self.func_ir,
                                     self.flags)

    def backend_nopython_mode(self):
        """Native mode compilation"""
        msg = ("Function %s failed at nopython "
               "mode lowering" % (self.func_id.func_name,))
        with self.fallback_context(msg):
            return native_lowering_stage(
                self.targetctx,
                self.library,
                self.func_ir,
                self.typemap,
                self.return_type,
                self.calltypes,
                self.flags,
                self.metadata)

    def _backend(self, lowerfn, objectmode):
        """
        Back-end: Generate LLVM IR from Numba IR, compile to machine code
        """
        if self.library is None:
            codegen = self.targetctx.codegen()
            self.library = codegen.create_library(self.func_id.func_qualname)
            # Enable object caching upfront, so that the library can
            # be later serialized.
            self.library.enable_object_caching()

        lowered = lowerfn()
        signature = typing.signature(self.return_type, *self.args)
        self.cr = compile_result(
            typing_context=self.typingctx,
            target_context=self.targetctx,
            entry_point=lowered.cfunc,
            typing_error=self.status.fail_reason,
            type_annotation=self.type_annotation,
            library=self.library,
            call_helper=lowered.call_helper,
            signature=signature,
            objectmode=objectmode,
            interpmode=False,
            lifted=self.lifted,
            fndesc=lowered.fndesc,
            environment=lowered.env,
            has_dynamic_globals=lowered.has_dynamic_globals,
            metadata=self.metadata,
        )

    def stage_objectmode_backend(self):
        """
        Lowering for object mode
        """
        lowerfn = self.backend_object_mode
        self._backend(lowerfn, objectmode=True)

        # Warn if compiled function in object mode and force_pyobject not set
        if not self.flags.force_pyobject:
            if len(self.lifted) > 0:
                warn_msg = ('Function "%s" was compiled in object mode without'
                            ' forceobj=True, but has lifted loops.'
                            % (self.func_id.func_name,))
            else:
                warn_msg = ('Function "%s" was compiled in object mode without'
                            ' forceobj=True.'
                            % (self.func_id.func_name,))
            warnings.warn_explicit(warn_msg, errors.NumbaWarning,
                                   self.func_id.filename,
                                   self.func_id.firstlineno)
            if self.flags.release_gil:
                warn_msg = ("Code running in object mode won't allow parallel"
                            " execution despite nogil=True.")
                warnings.warn_explicit(warn_msg, errors.NumbaWarning,
                                       self.func_id.filename,
                                       self.func_id.firstlineno)

    def stage_nopython_backend(self):
        """
        Do lowering for nopython
        """
        lowerfn = self.backend_nopython_mode
        self._backend(lowerfn, objectmode=False)

    def stage_compile_interp_mode(self):
        """
        Just create a compile result for interpreter mode
        """
        args = [types.pyobject] * len(self.args)
        signature = typing.signature(types.pyobject, *args)
        self.cr = compile_result(typing_context=self.typingctx,
                                 target_context=self.targetctx,
                                 entry_point=self.func_id.func,
                                 typing_error=self.status.fail_reason,
                                 type_annotation="<Interpreter mode function>",
                                 signature=signature,
                                 objectmode=False,
                                 interpmode=True,
                                 lifted=(),
                                 fndesc=None,)

    def stage_ir_legalization(self):
        """Reject IR containing features the lowering cannot handle."""
        raise_on_unsupported_feature(self.func_ir)

    def stage_cleanup(self):
        """
        Cleanup intermediate results to release resources.
        """
        # Intentionally a no-op here; subclasses may override.

    def define_pipelines(self, pm):
        """Child classes override this to customize the pipeline.
        """
        raise NotImplementedError()

    def add_preprocessing_stage(self, pm):
        """Add the preprocessing stage that analyzes the bytecode to
        prepare the Numba IR.
        """
        if self.func_ir is None:
            pm.add_stage(self.stage_analyze_bytecode, "analyzing bytecode")
        pm.add_stage(self.stage_process_ir, "processing IR")

    def add_pre_typing_stage(self, pm):
        """Add any stages that go before type-inference.
        The current stages contain type-agnostic rewrite passes.
        """
        if not self.flags.no_rewrites:
            if self.status.can_fallback:
                pm.add_stage(self.stage_preserve_ir,
                             "preserve IR for fallback")
            pm.add_stage(self.stage_generic_rewrites, "nopython rewrites")
        pm.add_stage(self.stage_inline_pass,
                     "inline calls to locally defined closures")

    def add_typing_stage(self, pm):
        """Add the type-inference stage necessary for nopython mode.
        """
        pm.add_stage(self.stage_nopython_frontend, "nopython frontend")
        pm.add_stage(self.stage_annotate_type, "annotate type")

    def add_optimization_stage(self, pm):
        """Add optimization stages.
        """
        if self.flags.auto_parallel.enabled:
            pm.add_stage(self.stage_pre_parfor_pass,
                         "Preprocessing for parfors")
        if not self.flags.no_rewrites:
            pm.add_stage(self.stage_nopython_rewrites, "nopython rewrites")
        if self.flags.auto_parallel.enabled:
            pm.add_stage(self.stage_parfor_pass, "convert to parfors")

    def add_lowering_stage(self, pm):
        """Add the lowering (code-generation) stage for nopython-mode
        """
        pm.add_stage(self.stage_nopython_backend, "nopython mode backend")

    def add_cleanup_stage(self, pm):
        """Add the clean-up stage to remove intermediate results.
        """
        pm.add_stage(self.stage_cleanup, "cleanup intermediate results")

    def add_with_handling_stage(self, pm):
        """Add the with-context lifting stage."""
        pm.add_stage(self.stage_frontend_withlift, "Handle with contexts")

    def define_nopython_pipeline(self, pm, name='nopython'):
        """Add the nopython-mode pipeline to the pipeline manager
        """
        pm.create_pipeline(name)
        self.add_preprocessing_stage(pm)
        self.add_with_handling_stage(pm)
        self.add_pre_typing_stage(pm)
        self.add_typing_stage(pm)
        self.add_optimization_stage(pm)
        pm.add_stage(self.stage_ir_legalization,
                     "ensure IR is legal prior to lowering")
        self.add_lowering_stage(pm)
        pm.add_stage(self.stage_dump_diagnostics, "dump diagnostics")
        self.add_cleanup_stage(pm)

    def define_objectmode_pipeline(self, pm, name='object'):
        """Add the object-mode pipeline to the pipeline manager
        """
        pm.create_pipeline(name)
        self.add_preprocessing_stage(pm)
        pm.add_stage(self.stage_objectmode_frontend, "object mode frontend")
        pm.add_stage(self.stage_inline_pass,
                     "inline calls to locally defined closures")
        pm.add_stage(self.stage_annotate_type, "annotate type")
        pm.add_stage(self.stage_ir_legalization,
                     "ensure IR is legal prior to lowering")
        pm.add_stage(self.stage_objectmode_backend, "object mode backend")
        self.add_cleanup_stage(pm)

    def define_interpreted_pipeline(self, pm, name="interp"):
        """Add the interpreted-mode (fallback) pipeline to the pipeline
        manager
        """
        pm.create_pipeline(name)
        pm.add_stage(self.stage_compile_interp_mode,
                     "compiling with interpreter mode")
        self.add_cleanup_stage(pm)

    def _compile_core(self):
        """
        Populate and run compiler pipeline
        """
        pm = _PipelineManager()
        self.define_pipelines(pm)
        pm.finalize()
        res = pm.run(self.status)
        if res is not None:
            # Early pipeline completion
            return res
        else:
            assert self.cr is not None
            return self.cr

    def _compile_bytecode(self):
        """
        Populate and run pipeline for bytecode input
        """
        assert self.func_ir is None
        return self._compile_core()

    def _compile_ir(self):
        """
        Populate and run pipeline for IR input
        """
        assert self.func_ir is not None
        return self._compile_core()
class BasePipeline(object):
    """
    Stores and manages states for the compiler pipeline.

    A pipeline is a sequence of named stages registered on a
    ``_PipelineManager`` (see ``define_pipelines``); each ``stage_*``
    method reads and writes intermediate results held on this object
    (bytecode, Numba IR, type maps) and ultimately produces a compile
    result in ``self.cr``.

    NOTE(review): the file contains two definitions of ``BasePipeline``;
    this later one wins at import time. This copy originally omitted the
    ``has_dynamic_globals`` field from the compile result built in
    ``_backend`` (the earlier copy passes it) — restored below for
    consistency.
    """

    def __init__(self, typingctx, targetctx, library, args, return_type,
                 flags, locals):
        # Make sure the environment is reloaded
        config.reload_config()
        typingctx.refresh()
        targetctx.refresh()

        self.typingctx = typingctx
        self.targetctx = _make_subtarget(targetctx, flags)
        self.library = library
        self.args = args
        self.return_type = return_type
        self.flags = flags
        self.locals = locals

        # Results of various steps of the compilation pipeline
        self.bc = None
        self.func_id = None
        self.func_ir = None
        self.func_ir_original = None  # used for fallback
        self.lifted = None
        self.lifted_from = None
        self.typemap = None
        self.calltypes = None
        self.type_annotation = None
        self.metadata = {}  # holds arbitrary inter-pipeline stage meta data

        # parfor diagnostics info, add to metadata
        self.parfor_diagnostics = ParforDiagnostics()
        self.metadata['parfor_diagnostics'] = self.parfor_diagnostics

        self.status = _CompileStatus(
            can_fallback=self.flags.enable_pyobject,
            can_giveup=config.COMPATIBILITY_MODE
        )

    @contextmanager
    def fallback_context(self, msg):
        """
        Wraps code that would signal a fallback to object mode.

        If fallback is allowed, the exception is turned into a
        NumbaWarning (prefixed with *msg*) and re-raised so the
        pipeline manager can switch pipelines.
        """
        try:
            yield
        except BaseException as e:
            if not self.status.can_fallback:
                raise
            else:
                if utils.PYVERSION >= (3,):
                    # Clear all references attached to the traceback
                    e = e.with_traceback(None)
                warnings.warn_explicit('%s: %s' % (msg, e),
                                       errors.NumbaWarning,
                                       self.func_id.filename,
                                       self.func_id.firstlineno)
                raise

    @contextmanager
    def giveup_context(self, msg):
        """
        Wraps code that would signal a fallback to interpreter mode.

        Same shape as ``fallback_context`` but gated on
        ``status.can_giveup`` instead of ``status.can_fallback``.
        """
        try:
            yield
        except BaseException as e:
            if not self.status.can_giveup:
                raise
            else:
                if utils.PYVERSION >= (3,):
                    # Clear all references attached to the traceback
                    e = e.with_traceback(None)
                warnings.warn_explicit('%s: %s' % (msg, e),
                                       errors.NumbaWarning,
                                       self.func_id.filename,
                                       self.func_id.firstlineno)
                raise

    def extract_bytecode(self, func_id):
        """
        Extract bytecode from function
        """
        bc = bytecode.ByteCode(func_id)
        if config.DUMP_BYTECODE:
            print(bc.dump())
        return bc

    def compile_extra(self, func):
        """Compile a Python function object; entry point for bytecode input.

        Falls back to interpreter mode (when allowed) if bytecode
        extraction fails.
        """
        self.func_id = bytecode.FunctionIdentity.from_function(func)
        try:
            bc = self.extract_bytecode(self.func_id)
        except BaseException as e:
            if self.status.can_giveup:
                self.stage_compile_interp_mode()
                return self.cr
            else:
                raise e
        self.bc = bc
        self.lifted = ()
        self.lifted_from = None
        return self._compile_bytecode()

    def compile_ir(self, func_ir, lifted=(), lifted_from=None):
        """Compile an existing Numba IR; entry point for IR input
        (e.g. lifted loops / with-blocks)."""
        self.func_id = func_ir.func_id
        self.lifted = lifted
        self.lifted_from = lifted_from
        self._set_and_check_ir(func_ir)
        return self._compile_ir()

    def stage_analyze_bytecode(self):
        """
        Analyze bytecode and translating to Numba IR
        """
        func_ir = translate_stage(self.func_id, self.bc)
        self._set_and_check_ir(func_ir)

    def _set_and_check_ir(self, func_ir):
        # Install the IR and validate the argument-type count against
        # the function's actual arity.
        self.func_ir = func_ir
        self.nargs = self.func_ir.arg_count
        if not self.args and self.flags.force_pyobject:
            # Allow an empty argument types specification when object mode
            # is explicitly requested.
            self.args = (types.pyobject,) * self.nargs
        elif len(self.args) != self.nargs:
            raise TypeError("Signature mismatch: %d argument types given, "
                            "but function takes %d arguments"
                            % (len(self.args), self.nargs))

    def stage_process_ir(self):
        """Run generic IR post-processing on the freshly translated IR."""
        ir_processing_stage(self.func_ir)

    def stage_preserve_ir(self):
        """Keep a copy of the IR so object-mode fallback can restart
        from an unmodified version."""
        self.func_ir_original = self.func_ir.copy()

    def frontend_looplift(self):
        """
        Loop lifting analysis and transformation
        """
        loop_flags = self.flags.copy()
        outer_flags = self.flags.copy()
        # Do not recursively loop lift
        outer_flags.unset('enable_looplift')
        loop_flags.unset('enable_looplift')
        if not self.flags.enable_pyobject_looplift:
            loop_flags.unset('enable_pyobject')
        main, loops = transforms.loop_lifting(self.func_ir,
                                              typingctx=self.typingctx,
                                              targetctx=self.targetctx,
                                              locals=self.locals,
                                              flags=loop_flags)
        if loops:
            # Some loops were extracted
            if config.DEBUG_FRONTEND or config.DEBUG:
                for loop in loops:
                    print("Lifting loop", loop.get_source_location())
            cres = compile_ir(self.typingctx, self.targetctx, main,
                              self.args, self.return_type,
                              outer_flags, self.locals,
                              lifted=tuple(loops), lifted_from=None)
            return cres

    def stage_frontend_withlift(self):
        """
        Extract with-contexts
        """
        main, withs = transforms.with_lifting(
            func_ir=self.func_ir,
            typingctx=self.typingctx,
            targetctx=self.targetctx,
            flags=self.flags,
            locals=self.locals,
        )
        if withs:
            cres = compile_ir(self.typingctx, self.targetctx, main,
                              self.args, self.return_type,
                              self.flags, self.locals,
                              lifted=tuple(withs), lifted_from=None,
                              pipeline_class=type(self))
            # Terminate the current pipeline early with this result.
            raise _EarlyPipelineCompletion(cres)

    def stage_objectmode_frontend(self):
        """
        Front-end: Analyze bytecode, generate Numba IR, infer types
        """
        # Restore the preserved IR if a fallback copy exists.
        self.func_ir = self.func_ir_original or self.func_ir
        if self.flags.enable_looplift:
            assert not self.lifted
            cres = self.frontend_looplift()
            if cres is not None:
                raise _EarlyPipelineCompletion(cres)

        # Fallback typing: everything is a python object
        self.typemap = defaultdict(lambda: types.pyobject)
        self.calltypes = defaultdict(lambda: types.pyobject)
        self.return_type = types.pyobject

    def stage_nopython_frontend(self):
        """
        Type inference and legalization
        """
        with self.fallback_context('Function "%s" failed type inference'
                                   % (self.func_id.func_name,)):
            # Type inference
            typemap, return_type, calltypes = type_inference_stage(
                self.typingctx,
                self.func_ir,
                self.args,
                self.return_type,
                self.locals)
            self.typemap = typemap
            self.return_type = return_type
            self.calltypes = calltypes

        with self.fallback_context('Function "%s" has invalid return type'
                                   % (self.func_id.func_name,)):
            legalize_return_type(self.return_type, self.func_ir,
                                 self.targetctx)

    def stage_generic_rewrites(self):
        """
        Perform any intermediate representation rewrites before type
        inference.
        """
        assert self.func_ir
        msg = ('Internal error in pre-inference rewriting '
               'pass encountered during compilation of '
               'function "%s"' % (self.func_id.func_name,))
        with self.fallback_context(msg):
            rewrites.rewrite_registry.apply('before-inference',
                                            self, self.func_ir)

    def stage_nopython_rewrites(self):
        """
        Perform any intermediate representation rewrites after type
        inference.
        """
        # Ensure we have an IR and type information.
        assert self.func_ir
        assert isinstance(getattr(self, 'typemap', None), dict)
        assert isinstance(getattr(self, 'calltypes', None), dict)
        msg = ('Internal error in post-inference rewriting '
               'pass encountered during compilation of '
               'function "%s"' % (self.func_id.func_name,))
        with self.fallback_context(msg):
            rewrites.rewrite_registry.apply('after-inference',
                                            self, self.func_ir)

    def stage_pre_parfor_pass(self):
        """
        Preprocessing for data-parallel computations.
        """
        # Ensure we have an IR and type information.
        assert self.func_ir
        preparfor_pass = PreParforPass(
            self.func_ir,
            self.type_annotation.typemap,
            self.type_annotation.calltypes, self.typingctx,
            self.flags.auto_parallel,
            self.parfor_diagnostics.replaced_fns
        )
        preparfor_pass.run()

    def stage_parfor_pass(self):
        """
        Convert data-parallel computations into Parfor nodes
        """
        # Ensure we have an IR and type information.
        assert self.func_ir
        parfor_pass = ParforPass(self.func_ir, self.type_annotation.typemap,
                                 self.type_annotation.calltypes,
                                 self.return_type, self.typingctx,
                                 self.flags.auto_parallel, self.flags,
                                 self.parfor_diagnostics)
        parfor_pass.run()

        if config.WARNINGS:
            # check the parfor pass worked and warn if it didn't
            has_parfor = False
            for blk in self.func_ir.blocks.values():
                for stmnt in blk.body:
                    if isinstance(stmnt, Parfor):
                        has_parfor = True
                        break
                else:
                    continue
                break

            if not has_parfor:
                # parfor calls the compiler chain again with a string
                if not self.func_ir.loc.filename == '<string>':
                    msg = ("parallel=True was specified but no transformation"
                           " for parallel execution was possible.")
                    warnings.warn_explicit(
                        msg,
                        errors.NumbaWarning,
                        self.func_id.filename,
                        self.func_id.firstlineno
                    )

    def stage_inline_pass(self):
        """
        Inline calls to locally defined closures.
        """
        # Ensure we have an IR and type information.
        assert self.func_ir
        inline_pass = InlineClosureCallPass(self.func_ir,
                                            self.flags.auto_parallel,
                                            self.parfor_diagnostics.replaced_fns)
        inline_pass.run()
        # Remove all Dels, and re-run postproc
        post_proc = postproc.PostProcessor(self.func_ir)
        post_proc.run()

        if config.DEBUG or config.DUMP_IR:
            name = self.func_ir.func_id.func_qualname
            print(("IR DUMP: %s" % name).center(80, "-"))
            self.func_ir.dump()

    def stage_annotate_type(self):
        """
        Create type annotation after type inference
        """
        self.type_annotation = type_annotations.TypeAnnotation(
            func_ir=self.func_ir,
            typemap=self.typemap,
            calltypes=self.calltypes,
            lifted=self.lifted,
            lifted_from=self.lifted_from,
            args=self.args,
            return_type=self.return_type,
            html_output=config.HTML)

        if config.ANNOTATE:
            print("ANNOTATION".center(80, '-'))
            print(self.type_annotation)
            print('=' * 80)
        if config.HTML:
            with open(config.HTML, 'w') as fout:
                self.type_annotation.html_annotate(fout)

    def stage_dump_diagnostics(self):
        """Dump parfor diagnostics when parallel mode and the diagnostics
        config option are both enabled."""
        if self.flags.auto_parallel.enabled:
            if config.PARALLEL_DIAGNOSTICS:
                if self.parfor_diagnostics is not None:
                    self.parfor_diagnostics.dump(config.PARALLEL_DIAGNOSTICS)
                else:
                    raise RuntimeError("Diagnostics failed.")

    def backend_object_mode(self):
        """
        Object mode compilation
        """
        with self.giveup_context("Function %s failed at object mode lowering"
                                 % (self.func_id.func_name,)):
            if len(self.args) != self.nargs:
                # append missing
                self.args = (tuple(self.args) + (types.pyobject,) *
                             (self.nargs - len(self.args)))
            return py_lowering_stage(self.targetctx,
                                     self.library,
                                     self.func_ir,
                                     self.flags)

    def backend_nopython_mode(self):
        """Native mode compilation"""
        msg = ("Function %s failed at nopython "
               "mode lowering" % (self.func_id.func_name,))
        with self.fallback_context(msg):
            return native_lowering_stage(
                self.targetctx,
                self.library,
                self.func_ir,
                self.typemap,
                self.return_type,
                self.calltypes,
                self.flags,
                self.metadata)

    def _backend(self, lowerfn, objectmode):
        """
        Back-end: Generate LLVM IR from Numba IR, compile to machine code
        """
        if self.library is None:
            codegen = self.targetctx.codegen()
            self.library = codegen.create_library(self.func_id.func_qualname)
            # Enable object caching upfront, so that the library can
            # be later serialized.
            self.library.enable_object_caching()

        lowered = lowerfn()
        signature = typing.signature(self.return_type, *self.args)
        self.cr = compile_result(
            typing_context=self.typingctx,
            target_context=self.targetctx,
            entry_point=lowered.cfunc,
            typing_error=self.status.fail_reason,
            type_annotation=self.type_annotation,
            library=self.library,
            call_helper=lowered.call_helper,
            signature=signature,
            objectmode=objectmode,
            interpmode=False,
            lifted=self.lifted,
            fndesc=lowered.fndesc,
            environment=lowered.env,
            # Restored for consistency with the other BasePipeline copy in
            # this file, which forwards the lowering's dynamic-globals flag.
            has_dynamic_globals=lowered.has_dynamic_globals,
            metadata=self.metadata,
        )

    def stage_objectmode_backend(self):
        """
        Lowering for object mode
        """
        lowerfn = self.backend_object_mode
        self._backend(lowerfn, objectmode=True)

        # Warn if compiled function in object mode and force_pyobject not set
        if not self.flags.force_pyobject:
            if len(self.lifted) > 0:
                warn_msg = ('Function "%s" was compiled in object mode without'
                            ' forceobj=True, but has lifted loops.'
                            % (self.func_id.func_name,))
            else:
                warn_msg = ('Function "%s" was compiled in object mode without'
                            ' forceobj=True.'
                            % (self.func_id.func_name,))
            warnings.warn_explicit(warn_msg, errors.NumbaWarning,
                                   self.func_id.filename,
                                   self.func_id.firstlineno)
            if self.flags.release_gil:
                warn_msg = ("Code running in object mode won't allow parallel"
                            " execution despite nogil=True.")
                warnings.warn_explicit(warn_msg, errors.NumbaWarning,
                                       self.func_id.filename,
                                       self.func_id.firstlineno)

    def stage_nopython_backend(self):
        """
        Do lowering for nopython
        """
        lowerfn = self.backend_nopython_mode
        self._backend(lowerfn, objectmode=False)

    def stage_compile_interp_mode(self):
        """
        Just create a compile result for interpreter mode
        """
        args = [types.pyobject] * len(self.args)
        signature = typing.signature(types.pyobject, *args)
        self.cr = compile_result(typing_context=self.typingctx,
                                 target_context=self.targetctx,
                                 entry_point=self.func_id.func,
                                 typing_error=self.status.fail_reason,
                                 type_annotation="<Interpreter mode function>",
                                 signature=signature,
                                 objectmode=False,
                                 interpmode=True,
                                 lifted=(),
                                 fndesc=None,)

    def stage_ir_legalization(self):
        """Reject IR containing features the lowering cannot handle."""
        raise_on_unsupported_feature(self.func_ir)

    def stage_cleanup(self):
        """
        Cleanup intermediate results to release resources.
        """
        # Intentionally a no-op here; subclasses may override.

    def define_pipelines(self, pm):
        """Child classes override this to customize the pipeline.
        """
        raise NotImplementedError()

    def add_preprocessing_stage(self, pm):
        """Add the preprocessing stage that analyzes the bytecode to
        prepare the Numba IR.
        """
        if self.func_ir is None:
            pm.add_stage(self.stage_analyze_bytecode, "analyzing bytecode")
        pm.add_stage(self.stage_process_ir, "processing IR")

    def add_pre_typing_stage(self, pm):
        """Add any stages that go before type-inference.
        The current stages contain type-agnostic rewrite passes.
        """
        if not self.flags.no_rewrites:
            if self.status.can_fallback:
                pm.add_stage(self.stage_preserve_ir,
                             "preserve IR for fallback")
            pm.add_stage(self.stage_generic_rewrites, "nopython rewrites")
        pm.add_stage(self.stage_inline_pass,
                     "inline calls to locally defined closures")

    def add_typing_stage(self, pm):
        """Add the type-inference stage necessary for nopython mode.
        """
        pm.add_stage(self.stage_nopython_frontend, "nopython frontend")
        pm.add_stage(self.stage_annotate_type, "annotate type")

    def add_optimization_stage(self, pm):
        """Add optimization stages.
        """
        if self.flags.auto_parallel.enabled:
            pm.add_stage(self.stage_pre_parfor_pass,
                         "Preprocessing for parfors")
        if not self.flags.no_rewrites:
            pm.add_stage(self.stage_nopython_rewrites, "nopython rewrites")
        if self.flags.auto_parallel.enabled:
            pm.add_stage(self.stage_parfor_pass, "convert to parfors")

    def add_lowering_stage(self, pm):
        """Add the lowering (code-generation) stage for nopython-mode
        """
        pm.add_stage(self.stage_nopython_backend, "nopython mode backend")

    def add_cleanup_stage(self, pm):
        """Add the clean-up stage to remove intermediate results.
        """
        pm.add_stage(self.stage_cleanup, "cleanup intermediate results")

    def add_with_handling_stage(self, pm):
        """Add the with-context lifting stage."""
        pm.add_stage(self.stage_frontend_withlift, "Handle with contexts")

    def define_nopython_pipeline(self, pm, name='nopython'):
        """Add the nopython-mode pipeline to the pipeline manager
        """
        pm.create_pipeline(name)
        self.add_preprocessing_stage(pm)
        self.add_with_handling_stage(pm)
        self.add_pre_typing_stage(pm)
        self.add_typing_stage(pm)
        self.add_optimization_stage(pm)
        pm.add_stage(self.stage_ir_legalization,
                     "ensure IR is legal prior to lowering")
        self.add_lowering_stage(pm)
        pm.add_stage(self.stage_dump_diagnostics, "dump diagnostics")
        self.add_cleanup_stage(pm)

    def define_objectmode_pipeline(self, pm, name='object'):
        """Add the object-mode pipeline to the pipeline manager
        """
        pm.create_pipeline(name)
        self.add_preprocessing_stage(pm)
        pm.add_stage(self.stage_objectmode_frontend, "object mode frontend")
        pm.add_stage(self.stage_inline_pass,
                     "inline calls to locally defined closures")
        pm.add_stage(self.stage_annotate_type, "annotate type")
        pm.add_stage(self.stage_ir_legalization,
                     "ensure IR is legal prior to lowering")
        pm.add_stage(self.stage_objectmode_backend, "object mode backend")
        self.add_cleanup_stage(pm)

    def define_interpreted_pipeline(self, pm, name="interp"):
        """Add the interpreted-mode (fallback) pipeline to the pipeline
        manager
        """
        pm.create_pipeline(name)
        pm.add_stage(self.stage_compile_interp_mode,
                     "compiling with interpreter mode")
        self.add_cleanup_stage(pm)

    def _compile_core(self):
        """
        Populate and run compiler pipeline
        """
        pm = _PipelineManager()
        self.define_pipelines(pm)
        pm.finalize()
        res = pm.run(self.status)
        if res is not None:
            # Early pipeline completion
            return res
        else:
            assert self.cr is not None
            return self.cr

    def _compile_bytecode(self):
        """
        Populate and run pipeline for bytecode input
        """
        assert self.func_ir is None
        return self._compile_core()

    def _compile_ir(self):
        """
        Populate and run pipeline for IR input
        """
        assert self.func_ir is not None
        return self._compile_core()