def is_leap_year(builder, year_val):
    """
    Return a predicate indicating whether *year_val* (offset by 1970) is a
    leap year.
    """
    actual_year = builder.add(year_val, Constant(DATETIME64, 1970))
    multiple_of_4 = cgutils.is_null(
        builder, builder.and_(actual_year, Constant(DATETIME64, 3)))
    not_multiple_of_100 = cgutils.is_not_null(
        builder, builder.srem(actual_year, Constant(DATETIME64, 100)))
    multiple_of_400 = cgutils.is_null(
        builder, builder.srem(actual_year, Constant(DATETIME64, 400)))
    return builder.and_(multiple_of_4,
                        builder.or_(not_multiple_of_100, multiple_of_400))
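
# Hedged pure-Python rendering of the predicate built above (not part of the
# original source; `year & 3 == 0` is the same "multiple of 4" bit trick used
# in the emitted IR):
def _is_leap_year_py(year_since_1970):
    year = year_since_1970 + 1970
    return (year & 3) == 0 and (year % 100 != 0 or year % 400 == 0)

# e.g. _is_leap_year_py(2000 - 1970) is True, _is_leap_year_py(1900 - 1970) is False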
def _unbox_native_field(typ, obj, field_name: str, c):
    ret_ptr = cgutils.alloca_once(c.builder, c.context.get_value_type(typ))
    is_error_ptr = cgutils.alloca_once_value(c.builder, cgutils.false_bit)
    fail_obj = c.context.get_constant_null(typ)

    with local_return(c.builder) as ret:
        fail_blk = c.builder.append_basic_block("fail")
        with c.builder.goto_block(fail_blk):
            c.builder.store(cgutils.true_bit, is_error_ptr)
            c.builder.store(fail_obj, ret_ptr)
            ret()

        field_obj = c.pyapi.object_getattr_string(obj, field_name)
        with cgutils.if_unlikely(c.builder,
                                 cgutils.is_null(c.builder, field_obj)):
            c.builder.branch(fail_blk)

        field_native = c.unbox(typ, field_obj)
        c.pyapi.decref(field_obj)
        with cgutils.if_unlikely(c.builder, field_native.is_error):
            c.builder.branch(fail_blk)

        c.builder.store(cgutils.false_bit, is_error_ptr)
        c.builder.store(field_native.value, ret_ptr)

    return NativeValue(c.builder.load(ret_ptr),
                       is_error=c.builder.load(is_error_ptr))
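
# Hypothetical usage inside an @unbox implementation (the Numba type and the
# field name below are illustrative, not from the original source):
#
#     lo = _unbox_native_field(types.float64, obj, "lo", c)
#     # lo.value / lo.is_error are then combined with the results for the
#     # other fields by the calling unboxer.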
def _python_set_to_native(typ, obj, c, size, setptr, errorptr):
    """
    Construct a new native set from a Python set.
    """
    # Allocate a new native set
    ok, inst = setobj.SetInstance.allocate_ex(c.context, c.builder, typ, size)
    with c.builder.if_else(ok, likely=True) as (if_ok, if_not_ok):
        with if_ok:
            # Traverse Python set and unbox objects into native set
            typobjptr = cgutils.alloca_once_value(
                c.builder, ir.Constant(c.pyapi.pyobj, None))

            with c.pyapi.set_iterate(obj) as loop:
                itemobj = loop.value
                # Mandate that objects all have the same exact type
                typobj = c.pyapi.get_type(itemobj)
                expected_typobj = c.builder.load(typobjptr)

                with c.builder.if_else(
                        cgutils.is_null(c.builder, expected_typobj),
                        likely=False) as (if_first, if_not_first):
                    with if_first:
                        # First iteration => store item type
                        c.builder.store(typobj, typobjptr)
                    with if_not_first:
                        # Otherwise, check item type
                        type_mismatch = c.builder.icmp_signed(
                            '!=', typobj, expected_typobj)
                        with c.builder.if_then(type_mismatch, likely=False):
                            c.builder.store(cgutils.true_bit, errorptr)
                            c.pyapi.err_set_string(
                                "PyExc_TypeError",
                                "can't unbox heterogeneous set")
                            loop.do_break()

                # XXX we don't call native cleanup for each set element,
                # since that would require keeping track
                # of which unboxings have been successful.
                native = c.unbox(typ.dtype, itemobj)
                with c.builder.if_then(native.is_error, likely=False):
                    c.builder.store(cgutils.true_bit, errorptr)
                inst.add_pyapi(c.pyapi, native.value, do_resize=False)

            if typ.reflected:
                inst.parent = obj
            # Associate meminfo pointer with the Python object for later reuse.
            with c.builder.if_then(c.builder.not_(c.builder.load(errorptr)),
                                   likely=False):
                c.pyapi.object_set_private_data(obj, inst.meminfo)
            inst.set_dirty(False)
            c.builder.store(inst.value, setptr)

        with if_not_ok:
            c.builder.store(cgutils.true_bit, errorptr)

    # If an error occurred, drop the whole native set
    with c.builder.if_then(c.builder.load(errorptr)):
        c.context.nrt.decref(c.builder, typ, inst.value)
def unbox_record(typ, obj, c):
    buf = c.pyapi.alloca_buffer()
    ptr = c.pyapi.extract_record_data(obj, buf)
    is_error = cgutils.is_null(c.builder, ptr)

    ltyp = c.context.get_value_type(typ)
    val = c.builder.bitcast(ptr, ltyp)

    def cleanup():
        c.pyapi.release_buffer(buf)

    return NativeValue(val, cleanup=cleanup, is_error=is_error)
def eh_check(self, builder):
    """Check if an exception is raised"""
    ctx = self._context
    cc = ctx.call_conv
    # Inspect the excinfo argument on the function
    trystatus = cc.check_try_status(builder)
    excinfo = trystatus.excinfo
    has_raised = builder.not_(cgutils.is_null(builder, excinfo))
    with builder.if_then(has_raised):
        self.eh_end_try(builder)
    return has_raised
def allocate_ex(cls, context, builder, list_type, nitems):
    """
    Allocate a ListInstance with its storage.
    Return an (ok, instance) tuple where *ok* is a LLVM boolean and
    *instance* is a ListInstance object (the object's contents are
    only valid when *ok* is true).
    """
    intp_t = context.get_value_type(types.intp)

    if isinstance(nitems, int):
        nitems = ir.Constant(intp_t, nitems)

    payload_type = context.get_data_type(types.ListPayload(list_type))
    payload_size = context.get_abi_sizeof(payload_type)
    itemsize = get_itemsize(context, list_type)
    # Account for the fact that the payload struct contains one entry
    payload_size -= itemsize

    ok = cgutils.alloca_once_value(builder, cgutils.true_bit)
    self = cls(context, builder, list_type, None)

    # Total allocation size = <payload header size> + nitems * itemsize
    allocsize, ovf = cgutils.muladd_with_overflow(
        builder, nitems,
        ir.Constant(intp_t, itemsize),
        ir.Constant(intp_t, payload_size),
    )
    with builder.if_then(ovf, likely=False):
        builder.store(cgutils.false_bit, ok)

    with builder.if_then(builder.load(ok), likely=True):
        meminfo = context.nrt.meminfo_new_varsize_dtor(
            builder, size=allocsize, dtor=self.get_dtor())
        with builder.if_else(cgutils.is_null(builder, meminfo),
                             likely=False) as (if_error, if_ok):
            with if_error:
                builder.store(cgutils.false_bit, ok)
            with if_ok:
                self._list.meminfo = meminfo
                self._list.parent = context.get_constant_null(types.pyobject)
                self._payload.allocated = nitems
                self._payload.size = ir.Constant(intp_t, 0)  # for safety
                self._payload.dirty = cgutils.false_bit
                # Zero the allocated region
                self.zfill(self.size.type(0), nitems)

    return builder.load(ok), self
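
# Hedged sketch (assumption): the size computation performed above, written in
# plain Python for a signed machine word. The IR version flags overflow of the
# intermediate multiply and add separately via cgutils.muladd_with_overflow;
# this sketch only mirrors the arithmetic and a single wide range check.
def _muladd_with_overflow_py(nitems, itemsize, payload_size, bits=64):
    total = nitems * itemsize + payload_size
    lo, hi = -(1 << (bits - 1)), (1 << (bits - 1)) - 1
    return total, not (lo <= total <= hi)  # (allocsize, overflowed)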
def box_function_type(typ, val, c):
    typ = typ.get_precise()

    sfunc = cgutils.create_struct_proxy(typ)(c.context, c.builder, value=val)

    pyaddr_ptr = cgutils.alloca_once(c.builder, c.pyapi.pyobj)
    raw_ptr = c.builder.inttoptr(sfunc.pyaddr, c.pyapi.pyobj)
    with c.builder.if_then(cgutils.is_null(c.builder, raw_ptr),
                           likely=False):
        cstr = f"first-class function {typ} parent object not set"
        c.pyapi.err_set_string("PyExc_MemoryError", cstr)
        c.builder.ret(c.pyapi.get_null_object())

    c.builder.store(raw_ptr, pyaddr_ptr)
    cfunc = c.builder.load(pyaddr_ptr)
    c.pyapi.incref(cfunc)
    return cfunc
def check_element_type(nth, itemobj, expected_typobj):
    # Note: *c*, *errorptr* and *loop* are free variables; this helper is
    # defined as a closure inside the enclosing list-unboxing function.
    typobj = nth.typeof(itemobj)
    # Check if *typobj* is NULL
    with c.builder.if_then(
            cgutils.is_null(c.builder, typobj),
            likely=False,
    ):
        c.builder.store(cgutils.true_bit, errorptr)
        loop.do_break()
    # Mandate that objects all have the same exact type
    type_mismatch = c.builder.icmp_signed('!=', typobj, expected_typobj)

    with c.builder.if_then(type_mismatch, likely=False):
        c.builder.store(cgutils.true_bit, errorptr)
        c.pyapi.err_format(
            "PyExc_TypeError",
            "can't unbox heterogeneous list: %S != %S",
            expected_typobj, typobj,
        )
        c.pyapi.decref(typobj)
        loop.do_break()
    c.pyapi.decref(typobj)
def lower_get_wrapper_address(context, builder, func, sig,
                              failure_mode='return_exc'):
    """Low-level call to _get_wrapper_address(func, sig).

    The GIL must be acquired before calling this function.
    """
    pyapi = context.get_python_api(builder)

    # Get the cfunc wrapper address. The code below trusts that the
    # function numba.function._get_wrapper_address exists and can be
    # called with two arguments. However, if an exception is raised in
    # the function, then it will be caught and propagated to the
    # caller.
    modname = context.insert_const_string(builder.module, __name__)
    numba_mod = pyapi.import_module_noblock(modname)
    numba_func = pyapi.object_getattr_string(numba_mod, '_get_wrapper_address')
    pyapi.decref(numba_mod)
    sig_obj = pyapi.unserialize(pyapi.serialize_object(sig))

    addr = pyapi.call_function_objargs(numba_func, (func, sig_obj))

    if failure_mode != 'ignore':
        with builder.if_then(cgutils.is_null(builder, addr), likely=False):
            # _get_wrapper_address has raised an exception, propagate it
            # to the caller.
            if failure_mode == 'return_exc':
                context.call_conv.return_exc(builder)
            elif failure_mode == 'return_null':
                builder.ret(pyapi.get_null_object())
            else:
                raise NotImplementedError(failure_mode)
    # else the caller will handle addr == NULL

    return addr  # new reference or NULL
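
# At the Python level, the call emitted above is roughly equivalent to this
# hedged sketch (the helper is whatever `_get_wrapper_address` the `__name__`
# lookup above resolves to; a NULL/exception result is handled by the branch
# generated in the function):
#
#     addr = _get_wrapper_address(func, sig)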
def _emit_python_wrapper(self, llvm_module):
    # Figure out the Python C API module creation function, and
    # get a LLVM function for it.
    create_module_fn = llvm_module.add_function(*self.module_create_definition)
    create_module_fn.linkage = lc.LINKAGE_EXTERNAL

    # Define a constant string for the module name.
    mod_name_const = self.context.insert_const_string(llvm_module,
                                                      self.module_name)

    mod_def_base_init = lc.Constant.struct((
        lt._pyobject_head_init,                         # PyObject_HEAD
        lc.Constant.null(self.m_init_ty),               # m_init
        lc.Constant.null(lt._llvm_py_ssize_t),          # m_index
        lc.Constant.null(lt._pyobject_head_p),          # m_copy
    ))
    mod_def_base = llvm_module.add_global_variable(mod_def_base_init.type,
                                                   '.module_def_base')
    mod_def_base.initializer = mod_def_base_init
    mod_def_base.linkage = lc.LINKAGE_INTERNAL

    method_array = self._emit_method_array(llvm_module)

    mod_def_init = lc.Constant.struct((
        mod_def_base_init,                              # m_base
        mod_name_const,                                 # m_name
        lc.Constant.null(self._char_star),              # m_doc
        lc.Constant.int(lt._llvm_py_ssize_t, -1),       # m_size
        method_array,                                   # m_methods
        lc.Constant.null(self.inquiry_ty),              # m_reload
        lc.Constant.null(self.traverseproc_ty),         # m_traverse
        lc.Constant.null(self.inquiry_ty),              # m_clear
        lc.Constant.null(self.freefunc_ty)              # m_free
    ))

    # Define the module definition struct as an internal global.
    mod_def = llvm_module.add_global_variable(mod_def_init.type,
                                              '.module_def')
    mod_def.initializer = mod_def_init
    mod_def.linkage = lc.LINKAGE_INTERNAL

    # Define the module initialization function.
    mod_init_fn = llvm_module.add_function(*self.module_init_definition)
    entry = mod_init_fn.append_basic_block('Entry')
    builder = lc.Builder(entry)
    pyapi = self.context.get_python_api(builder)

    mod = builder.call(create_module_fn,
                       (mod_def,
                        lc.Constant.int(lt._int32, sys.api_version)))

    # Test if module has been created correctly.
    # (XXX for some reason comparing with the NULL constant fails llvm
    #  with an assertion in pydebug mode)
    with builder.if_then(cgutils.is_null(builder, mod)):
        builder.ret(NULL.bitcast(mod_init_fn.type.pointee.return_type))

    env_array = self._emit_environment_array(llvm_module, builder, pyapi)
    envgv_array = self._emit_envgvs_array(llvm_module, builder, pyapi)
    ret = self._emit_module_init_code(llvm_module, builder, mod,
                                      method_array, env_array,
                                      envgv_array)
    if ret is not None:
        with builder.if_then(cgutils.is_not_null(builder, ret)):
            # Init function errored out
            builder.ret(lc.Constant.null(mod.type))

    builder.ret(mod)

    self.dll_exports.append(mod_init_fn.name)
def is_null(self, obj):
    return cgutils.is_null(self.builder, obj)
def unbox_meminfo_pointer(typ, obj, c):
    res = c.pyapi.nrt_meminfo_from_pyobject(obj)
    errored = cgutils.is_null(c.builder, res)
    return NativeValue(res, is_error=errored)
def _prepare_call_to_object_mode(context, builder, pyapi, func, signature,
                                 args):
    mod = builder.module

    bb_core_return = builder.append_basic_block('ufunc.core.return')

    # Call to
    # PyObject* ndarray_new(int nd,
    #                       npy_intp *dims,   /* shape */
    #                       npy_intp *strides,
    #                       void* data,
    #                       int type_num,
    #                       int itemsize)
    ll_int = context.get_value_type(types.int32)
    ll_intp = context.get_value_type(types.intp)
    ll_intp_ptr = Type.pointer(ll_intp)
    ll_voidptr = context.get_value_type(types.voidptr)
    ll_pyobj = context.get_value_type(types.pyobject)
    fnty = Type.function(ll_pyobj, [ll_int, ll_intp_ptr,
                                    ll_intp_ptr, ll_voidptr,
                                    ll_int, ll_int])

    fn_array_new = mod.get_or_insert_function(fnty, name="numba_ndarray_new")

    # Convert each llarray into pyobject
    error_pointer = cgutils.alloca_once(builder, Type.int(1), name='error')
    builder.store(cgutils.true_bit, error_pointer)

    # The PyObject* arguments to the kernel function
    object_args = []
    object_pointers = []

    for i, (arg, argty) in enumerate(zip(args, signature.args)):
        # Allocate NULL-initialized slot for this argument
        objptr = cgutils.alloca_once(builder, ll_pyobj, zfill=True)
        object_pointers.append(objptr)

        if isinstance(argty, types.Array):
            # Special case arrays: we don't need full-blown NRT reflection
            # since the argument will be gone at the end of the kernel
            arycls = context.make_array(argty)
            array = arycls(context, builder, value=arg)

            zero = Constant.int(ll_int, 0)

            # Extract members of the llarray
            nd = Constant.int(ll_int, argty.ndim)
            dims = builder.gep(array._get_ptr_by_name('shape'), [zero, zero])
            strides = builder.gep(array._get_ptr_by_name('strides'),
                                  [zero, zero])
            data = builder.bitcast(array.data, ll_voidptr)
            dtype = np.dtype(str(argty.dtype))

            # Prepare other info for reconstruction of the PyArray
            type_num = Constant.int(ll_int, dtype.num)
            itemsize = Constant.int(ll_int, dtype.itemsize)

            # Call helper to reconstruct PyArray objects
            obj = builder.call(fn_array_new, [nd, dims, strides, data,
                                              type_num, itemsize])
        else:
            # Other argument types => use generic boxing
            obj = pyapi.from_native_value(argty, arg)

        builder.store(obj, objptr)
        object_args.append(obj)

        obj_is_null = cgutils.is_null(builder, obj)
        builder.store(obj_is_null, error_pointer)
        cgutils.cbranch_or_continue(builder, obj_is_null, bb_core_return)

    # Call ufunc core function
    object_sig = [types.pyobject] * len(object_args)

    status, retval = context.call_conv.call_function(builder, func,
                                                     types.pyobject,
                                                     object_sig, object_args)
    builder.store(status.is_error, error_pointer)

    # Release returned object
    pyapi.decref(retval)

    builder.branch(bb_core_return)
    # At return block
    builder.position_at_end(bb_core_return)

    # Release argument objects
    for objptr in object_pointers:
        pyapi.decref(builder.load(objptr))

    innercall = status.code
    return innercall, builder.load(error_pointer)
def box_COO(typ: COOType, val: "some LLVM thing", c) -> COO:
    ret_ptr = cgutils.alloca_once(c.builder, c.pyapi.pyobj)
    fail_obj = c.pyapi.get_null_object()

    coo = cgutils.create_struct_proxy(typ)(c.context, c.builder, value=val)

    with local_return(c.builder) as ret:
        data_obj = c.box(typ.data_type, coo.data)
        with cgutils.if_unlikely(c.builder, cgutils.is_null(c.builder, data_obj)):
            c.builder.store(fail_obj, ret_ptr)
            ret()

        coords_obj = c.box(typ.coords_type, coo.coords)
        with cgutils.if_unlikely(c.builder, cgutils.is_null(c.builder, coords_obj)):
            c.pyapi.decref(data_obj)
            c.builder.store(fail_obj, ret_ptr)
            ret()

        shape_obj = c.box(typ.shape_type, coo.shape)
        with cgutils.if_unlikely(c.builder, cgutils.is_null(c.builder, shape_obj)):
            c.pyapi.decref(coords_obj)
            c.pyapi.decref(data_obj)
            c.builder.store(fail_obj, ret_ptr)
            ret()

        fill_value_obj = c.box(typ.fill_value_type, coo.fill_value)
        with cgutils.if_unlikely(c.builder, cgutils.is_null(c.builder, fill_value_obj)):
            c.pyapi.decref(shape_obj)
            c.pyapi.decref(coords_obj)
            c.pyapi.decref(data_obj)
            c.builder.store(fail_obj, ret_ptr)
            ret()

        class_obj = c.pyapi.unserialize(c.pyapi.serialize_object(COO))
        with cgutils.if_unlikely(c.builder, cgutils.is_null(c.builder, class_obj)):
            c.pyapi.decref(shape_obj)
            c.pyapi.decref(coords_obj)
            c.pyapi.decref(data_obj)
            c.pyapi.decref(fill_value_obj)
            c.builder.store(fail_obj, ret_ptr)
            ret()

        args = c.pyapi.tuple_pack([coords_obj, data_obj, shape_obj])
        c.pyapi.decref(shape_obj)
        c.pyapi.decref(coords_obj)
        c.pyapi.decref(data_obj)
        with cgutils.if_unlikely(c.builder, cgutils.is_null(c.builder, args)):
            c.pyapi.decref(fill_value_obj)
            c.pyapi.decref(class_obj)
            c.builder.store(fail_obj, ret_ptr)
            ret()

        kwargs = c.pyapi.dict_pack([("fill_value", fill_value_obj)])
        c.pyapi.decref(fill_value_obj)
        with cgutils.if_unlikely(c.builder, cgutils.is_null(c.builder, kwargs)):
            c.pyapi.decref(class_obj)
            c.builder.store(fail_obj, ret_ptr)
            ret()

        c.builder.store(c.pyapi.call(class_obj, args, kwargs), ret_ptr)
        c.pyapi.decref(class_obj)
        c.pyapi.decref(args)
        c.pyapi.decref(kwargs)

    return c.builder.load(ret_ptr)
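
# The boxed object assembled above corresponds roughly to this Python-level
# constructor call (hedged sketch; the argument order follows the tuple_pack
# and dict_pack calls in the function):
#
#     obj = COO(coords, data, shape, fill_value=fill_value)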
def _lower_call_ObjModeDispatcher(self, fnty, expr, signature):
    self.init_pyapi()
    # Acquire the GIL
    gil_state = self.pyapi.gil_ensure()
    # Resolve argument names, types and values
    argnames = [a.name for a in expr.args]
    argtypes = [self.typeof(a) for a in argnames]
    argvalues = [self.loadvar(a) for a in argnames]
    for v, ty in zip(argvalues, argtypes):
        # Because .from_native_value steals the reference
        self.incref(ty, v)

    argobjs = [self.pyapi.from_native_value(atyp, aval, self.env_manager)
               for atyp, aval in zip(argtypes, argvalues)]

    # Make the call
    entry_pt = fnty.dispatcher.compile(tuple(argtypes))
    callee = self.context.add_dynamic_addr(
        self.builder,
        id(entry_pt),
        info="with_objectmode",
    )
    ret_obj = self.pyapi.call_function_objargs(callee, argobjs)
    has_exception = cgutils.is_null(self.builder, ret_obj)
    with self.builder.if_else(has_exception) as (then, orelse):
        # Handles exception
        # This branch must exit the function
        with then:
            # Clean up args
            for obj in argobjs:
                self.pyapi.decref(obj)
            # Release the GIL
            self.pyapi.gil_release(gil_state)
            # Return and signal exception
            self.call_conv.return_exc(self.builder)

        # Handles normal return
        with orelse:
            # Convert the returned object to a native value
            native = self.pyapi.to_native_value(
                fnty.dispatcher.output_types,
                ret_obj,
            )
            output = native.value
            # Release objs
            self.pyapi.decref(ret_obj)
            for obj in argobjs:
                self.pyapi.decref(obj)
            # Clean up output
            if callable(native.cleanup):
                native.cleanup()
            # Release the GIL
            self.pyapi.gil_release(gil_state)
            # Error during unboxing
            with self.builder.if_then(native.is_error):
                self.call_conv.return_exc(self.builder)

            return output
def early_exit_if_null(builder, stack, obj):
    return early_exit_if(builder, stack, cgutils.is_null(builder, obj))
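
# Hedged usage sketch (assumption: *stack* is the contextlib.ExitStack used by
# the surrounding boxing helpers, and early_exit_if branches to the early-return
# path registered on it when the predicate is true):
#
#     item_obj = c.pyapi.object_getattr_string(obj, "data")  # may be NULL
#     early_exit_if_null(c.builder, stack, item_obj)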