def mk_primitive_op(self, func: Expr, args, output_type) -> Expr:
    """Lower the primitive function *func*, register the lowered
    implementation as a TVM global function (if not already present),
    and return a PackedCall expression that invokes it.

    func: the primitive Relay function to lower.
    args: argument expressions passed to the call.
    output_type: the Relay type of the call's result.
    """
    cc_key = compile_engine.CCacheKey(func, self.tgt)
    # Renamed from `hash`, which shadowed the builtin of the same name.
    # Structurally equal functions hash alike, so they share one
    # registered lowering.
    func_hash = tvm.ir.structural_hash(func)
    name = f"op_{func_hash}"
    if not get_global_func(name, allow_missing=True):
        jit_func = self.engine.jit(cc_key, self.tgt)
        register_func(name, jit_func)
    return PackedCall(name, args, [x.checked_type for x in args], output_type)
def create_op_call(self, op: Function, relay_args, py_args):
    """Lowers the passed primitive function, registers it in TVM's
    global compiler, and produces a call to the lowered function in
    the generated Python code."""
    # Compile the primitive and register it globally under a
    # hash-derived name; structurally equal ops share one registration.
    cache_key = compile_engine.CCacheKey(op, self.tgt)
    op_hash = tvm.ir.structural_hash(op)
    op_name = "_lowered_op_{}".format(op_hash)
    if not tvm.get_global_func(op_name, allow_missing=True):
        tvm.register_func(op_name, self.engine.jit(cache_key, self.tgt))

    def flatten_input(py_expr, arg_type):
        """Use the types of the function arguments to determine whether
        we expect a tensor or tuple (returns list of inputs to the
        lowered op call)."""
        if isinstance(arg_type, relay.TensorType):
            # equivalent: input.data -- a tensor passes through directly
            return [py_expr]
        assert isinstance(arg_type, relay.TupleType)
        # flatten each input.fields[i] recursively
        flattened = []
        for idx, field_type in enumerate(arg_type.fields):
            subscript = ast.Subscript(py_expr, ast.Index(Num(idx)), Load())
            flattened += flatten_input(subscript, field_type)
        return flattened

    def allocate_outputs(ret_type):
        """Use the function return type to produce auxiliary variables
        to store outputs.

        Returns ([assignments of output vars],
                 [extra arguments to pass to op call],
                 expression collecting output)."""
        if isinstance(ret_type, relay.TensorType):
            var_name = self.generate_var_name("_out")
            out_var = Name(var_name, Load())
            dims = ast.Tuple(
                [Num(dim) for dim in ret_type.concrete_shape], Load())
            # allocate an empty NDArray of the right shape and dtype
            # for the lowered op to write into
            alloc = Assign(
                [Name(var_name, Store())],
                self.create_call("nd.array", [
                    self.create_call(
                        "numpy.empty", [dims, Str(ret_type.dtype)])
                ]),
            )
            return ([alloc], [out_var], out_var)
        assert isinstance(ret_type, relay.TupleType)
        all_assignments = []
        all_args = []
        members = []
        for field_type in ret_type.fields:
            sub_assignments, sub_args, sub_out = allocate_outputs(field_type)
            all_assignments += sub_assignments
            all_args += sub_args
            members.append(sub_out)
        collected = self.create_call(
            "_container.tuple_object", [ast.List(members, Load())])
        return (all_assignments, all_args, collected)

    # Wrap the lowered-op invocation in a generated function and
    # return a call to that wrapper.
    wrapper_name = self.generate_function_name("_{}_wrapper".format(op_name))
    wrapper_params = [
        self.generate_var_name("_arg_{}".format(i))
        for i in range(len(py_args))
    ]

    call_inputs = []
    for param, relay_arg in zip(wrapper_params, relay_args):
        call_inputs += flatten_input(
            Name(param, Load()), relay_arg.checked_type)

    out_assignments, out_args, result_expr = allocate_outputs(
        op.checked_type.ret_type)

    # equiv: _op = tvm.get_global_func(op_name)
    handle_var = self.generate_var_name("_op")
    fetch_op = Assign(
        [Name(handle_var, Store())],
        self.create_call("tvm.get_global_func", [Str(op_name)]),
    )
    # equiv: _op(args) -- outputs are written in place via out_args
    invoke = self.create_call(handle_var, call_inputs + out_args)

    wrapper_body = out_assignments + [
        fetch_op, ast.Expr(invoke), Return(result_expr)
    ]
    wrapper_def = self.create_def(wrapper_name, wrapper_params, wrapper_body)
    return wrapper_def, self.create_call(wrapper_name, py_args)