def equal(input1, input2):
    """
    Check whether input1 equals to input2 elementwise.

    Args:
        input1 (tvm.tensor.Tensor): Tensor.
        input2 (tvm.tensor.Tensor): Tensor.

    Returns:
        tvm.tensor.Tensor. If input1 equal to input2 return True, else return False.
    """
    lhs_shape = [dim.value for dim in input1.shape]
    rhs_shape = [dim.value for dim in input2.shape]
    vc_util.check_shape(lhs_shape)
    vc_util.check_shape(rhs_shape)

    # Resolve the broadcast output shape of the two inputs.
    lhs_shape, rhs_shape, out_shape = produce_shapes(lhs_shape, rhs_shape)

    vc_util.elemwise_dtype_check(input1.dtype, input2.dtype)
    dtype = input1.dtype

    # Constant tensors that supply the "equal" / "not equal" values for Select.
    ones = _akg.tvm.compute(out_shape, lambda *indice: _akg.tvm.const(1, dtype), "T")
    zeros = _akg.tvm.compute(out_shape, lambda *indice: _akg.tvm.const(0, dtype), "F")

    lhs_bro = _akg.topi.broadcast_to(input1, out_shape)
    rhs_bro = _akg.topi.broadcast_to(input2, out_shape)

    # Pick 1 where the broadcast operands match, 0 elsewhere.
    picked = _akg.tvm.compute(
        out_shape,
        lambda *indice: _akg.tvm.expr.Select(
            lhs_bro[indice] == rhs_bro[indice], ones[indice], zeros[indice]),
        name="C")

    # Final cast of the 0/1 tensor down to boolean.
    res = _akg.tvm.compute(
        out_shape, lambda *indice: picked(*indice).astype("bool"), name="res")

    return res
def tile(data, multiples):
    """
    Repeats the data in the specified dimensions according to the multiples.

    Args:
        data (tvm.tensor.Tensor): Tensor.
        multiples (Union[list, tuple]): Elements must be int. The number of repetitions.

    Returns:
        tvm.tensor.Tensor, has the same dtype as data.
    """
    # Validate inputs before delegating to the topi implementation.
    vc_util.check_shape(data.shape)
    vc_util.check_int_list(multiples, "multiples")
    return _akg.topi.tile(data, multiples)
def cast(data, dst_type):
    """
    Cast data to target type.

    Args:
        data (tvm.tensor.Tensor): Tensor to be casted.
        dst_type (str): target cast type.

    Returns:
        tvm.tensor.Tensor, type is dst_type.
    """
    # Shape validation only; the conversion itself is handled by topi.
    vc_util.check_shape(data.shape)
    return _akg.topi.cast(data, dst_type)
def sub(data1, data2):
    """
    Computes data1 - data2 elementwise, broadcast is supported.

    Args:
        data1 (tvm.tensor.Tensor): Tensor.
        data2 (tvm.tensor.Tensor): Tensor of same type as data1, if shape(data2) != shape(data1),
            broadcast will happen.

    Returns:
        tvm.tensor.Tensor, subtracted result, with same type as input tensors and broadcasted
        shape of data1 and data2.
    """
    # Operands must share a dtype and have valid, broadcast-compatible shapes.
    vc_util.elemwise_dtype_check(data1.dtype, data2.dtype)
    for operand in (data1, data2):
        vc_util.check_shape(operand.shape)
    vc_util.auto_broadcast_check(data1.shape, data2.shape)

    return _akg.topi.subtract(data1, data2)
def logical_or(input1, input2):
    """
    Compute logical_or of input1 and input2.

    Args:
        input1 (tvm.tensor.Tensor): Tensor.
        input2 (tvm.tensor.Tensor): Tensor.

    Returns:
        tvm.tensor.Tensor. LogicalOr of input1 and input2.
    """
    vc_util.elemwise_dtype_check(input1.dtype, input2.dtype)
    shape1 = [x.value for x in input1.shape]
    shape2 = [x.value for x in input2.shape]
    vc_util.check_shape(shape1)
    vc_util.check_shape(shape2)
    # Fix: validate broadcast compatibility up front, consistent with sub/mul in
    # this file; topi.logical_or broadcasts, so mismatched shapes should be
    # rejected by the same explicit check rather than failing later.
    vc_util.auto_broadcast_check(shape1, shape2)

    res = _akg.topi.logical_or(input1, input2)
    return res
def sum_value(inputs, axis=None, keepdims=False):
    """
    Compute the sum of elements across dimensions of a tensor.

    Args:
        inputs (tvm.tensor.Tensor): Tensor.
        axis (Union[list, tuple, int, None]): If the list or tuple is empty, the axis equal
            to None.
        keepdims (bool): If keepdims equal to True, the result shape length is same to input
            shape length.

    Returns:
        tvm.tensor.Tensor, has same type as input. If keepdims is True, all reduced dimensions
        are retained with length 1, else these reduced axis will be eliminate.
    """
    axis = ft_util.refine_reduce_axis(inputs, axis)
    vc_util.check_shape(inputs.shape)

    # An empty axis means nothing to reduce: pass the tensor through unchanged.
    if axis:
        return _akg.topi.sum(inputs, axis=axis, keepdims=keepdims)
    return _akg.topi.identity(inputs)
def mul(l_input, r_input):
    """
    Calculate x * y element-wise.

    Note:
        mul supports broadcasting.

    Args:
        l_input (tvm.tensor.Tensor): Tensor.
        r_input (tvm.tensor.Tensor): Tensor.

    Returns:
        tvm.tensor.Tensor, has the same type as l_input and r_input.
    """
    lhs_shape = [dim.value for dim in l_input.shape]
    rhs_shape = [dim.value for dim in r_input.shape]
    vc_util.check_shape(lhs_shape)
    vc_util.check_shape(rhs_shape)
    # Shapes must be broadcast-compatible and dtypes must agree.
    vc_util.auto_broadcast_check(lhs_shape, rhs_shape)
    vc_util.elemwise_dtype_check(l_input.dtype, r_input.dtype)

    return _akg.topi.multiply(l_input, r_input)