def HswishGrad(y_grad, x):
    """
    Gradient of the Hard-Swish activation.

    Args:
        y_grad (tvm.tensor.Tensor): gradients backpropagated to the HSwish op.
        x (tvm.tensor.Tensor): inputs that were passed to the HSwish op.
            Presumably float16/float32, matching the sibling ops here — confirm.

    Returns:
        tvm.tensor.Tensor, same shape as x, elementwise:
            0                         where x <= -3
            y_grad                    where x >= 3
            y_grad * (2*x + 3) / 6    otherwise
    """
    shape = x.shape
    # Use a typed zero so both branches of if_then_else share x's dtype,
    # consistent with ReLU6Grad in this file (a bare 0 is an untyped int).
    zero = tvm.const(0, x.dtype)
    # Middle/left pieces: 0 below -3, the linear-ramp gradient in between.
    res0 = tvm.compute(shape, lambda *i: tvm.if_then_else(
        x(*i) <= -3, zero, y_grad(*i) * (2 * x(*i) + 3) / 6))
    # Right piece: the gradient passes through unchanged above 3.
    res6 = tvm.compute(shape, lambda *i: tvm.if_then_else(
        x(*i) >= 3, y_grad(*i), res0(*i)))
    return res6
def topi_nn_hsigmoid(x):
    """
    Hard-Sigmoid activation.

    Args:
        x (tvm.tensor.Tensor): input tensor. Presumably float16/float32,
            matching the sibling ops here — confirm.

    Returns:
        tvm.tensor.Tensor, same shape as x, elementwise:
            0            where x <= -3
            1            where x >= 3
            (x + 3) / 6  otherwise
    """
    # Typed constants keep every if_then_else branch in x's dtype,
    # consistent with ReLU6Grad in this file (bare 0/1 are untyped ints).
    zero = tvm.const(0, x.dtype)
    one = tvm.const(1, x.dtype)
    return tvm.compute(
        x.shape,
        lambda *i: tvm.if_then_else(
            x(*i) <= -3, zero,
            tvm.if_then_else(x(*i) >= 3, one, (x(*i) + 3) / 6)))
def ReLU6Grad(y_grad, x):
    """
    Computes gradients of Rectified Linear 6.

    Args:
        y_grad (tvm.tensor.Tensor): Tensor of type float16, float32,
            gradients backpropagated to the ReLU6 op.
        x (tvm.tensor.Tensor): Tensor of type float16/float32, inputs that
            were passed to the ReLU6 op, or its outputs.

    Returns:
        tvm.tensor.Tensor, has same type and shape as x.
    """
    shape = x.shape
    dtype = x.dtype
    zero = tvm.const(0, dtype)
    six = tvm.const(6, dtype)

    # Zero out everything below 0, then everything at or above 6, so the
    # intermediate is non-zero exactly on the interval where ReLU6 is linear.
    positive_part = tvm.compute(
        shape, lambda *i: tvm.if_then_else(x(*i) >= zero, x(*i), zero))
    in_linear_range = tvm.compute(
        shape, lambda *i: tvm.if_then_else(x(*i) >= six, zero, positive_part(*i)))
    # The incoming gradient flows through wherever that intermediate is non-zero.
    return tvm.compute(
        shape,
        lambda *i: tvm.if_then_else(in_linear_range(*i) == zero, zero, y_grad(*i)))
def topi_nn_HSwish(x):
    """
    Hard-Swish activation.

    Args:
        x (tvm.tensor.Tensor): input tensor. Presumably float16/float32,
            matching the sibling ops here — confirm.

    Returns:
        tvm.tensor.Tensor, same shape as x, elementwise:
            0                where x <= -3
            x                where x >= 3
            x * (x + 3) / 6  otherwise
    """
    # Typed zero keeps both if_then_else branches in x's dtype,
    # consistent with ReLU6Grad in this file (a bare 0 is an untyped int).
    zero = tvm.const(0, x.dtype)
    return tvm.compute(
        x.shape,
        lambda *i: tvm.if_then_else(
            x(*i) <= -3, zero,
            tvm.if_then_else(x(*i) >= 3, x(*i), x(*i) * (x(*i) + 3) / 6)))
def HsigmoidGrad(y_grad, x):
    """
    Gradient of the Hard-Sigmoid activation.

    Args:
        y_grad (tvm.tensor.Tensor): gradients backpropagated to the Hsigmoid op.
        x (tvm.tensor.Tensor): inputs that were passed to the Hsigmoid op.
            Presumably float16/float32, matching the sibling ops here — confirm.

    Returns:
        tvm.tensor.Tensor, same shape as x, elementwise:
            0           where x <= -3 or x >= 3
            y_grad / 6  otherwise (the linear region has slope 1/6)
    """
    # Typed zero keeps every if_then_else branch in x's dtype,
    # consistent with ReLU6Grad in this file (a bare 0 is an untyped int).
    zero = tvm.const(0, x.dtype)
    return tvm.compute(
        x.shape,
        lambda *i: tvm.if_then_else(
            x(*i) <= -3, zero,
            tvm.if_then_else(x(*i) >= 3, zero, y_grad(*i) / 6)))
def topi_nn_relu6(x):
    """topi nn relu6: clamp x elementwise into the range [0, 6]."""
    lower = tvm.const(0, x.dtype)
    upper = tvm.const(6, x.dtype)
    # min(max(x, 0), 6) is the standard two-sided clamp.
    return tvm.compute(x.shape, lambda *i: tvm.min(tvm.max(x(*i), lower), upper))