def test_log_softmax():
    """Forward check for log_softmax on a large 2-D tensor along a random axis."""
    num_dims = 2
    # Pick the reduction axis at random; hand it to the op as a negative index.
    reduce_axis = np.random.randint(0, num_dims)
    inp = np.random.uniform(-2, 2, size=(SMALL_Y, LARGE_X))
    op = mx.sym.log_softmax(axis=reduce_axis - num_dims)
    # Reference result: log of the numpy softmax, with a tiny epsilon so we
    # never take log(0).
    expected = np.log(np_softmax(inp, axis=reduce_axis) + 1e-20)
    check_symbolic_forward(op, [inp], [expected])
def test_sigmoid():
    """Forward check for sigmoid on a large 2-D tensor."""
    def ref_sigmoid(arr):
        # NumPy reference implementation: 1 / (1 + e^-x).
        return np.divide(1.0, (1.0 + np.exp(-arr)))
    inp_sym = mx.symbol.Variable("x")
    out_sym = mx.sym.sigmoid(inp_sym)
    inp = np.random.uniform(low=-1.0, high=1.0, size=(SMALL_Y, LARGE_X))
    check_symbolic_forward(out_sym, [inp], [ref_sigmoid(inp)])
def test_relu():
    """Forward check for relu on a large 2-D tensor.

    Inputs within ``eps`` of zero are replaced with 1.0 so the operator and
    the NumPy reference cannot disagree due to floating-point noise right at
    the kink of the activation.
    """
    def frelu(x):
        # NumPy reference: elementwise max(x, 0).
        return np.maximum(x, 0.0)
    shape = (SMALL_Y, LARGE_X)
    x = mx.symbol.Variable("x")
    y = mx.sym.relu(x)
    xa = np.random.uniform(low=-1.0, high=1.0, size=shape)
    eps = 1e-4
    # Nudge near-zero values away from the non-differentiable point.
    xa[abs(xa) < eps] = 1.0
    ya = frelu(xa)
    check_symbolic_forward(y, [xa], [ya])
    # NOTE(review): the original also computed frelu_grad(xa) into an unused
    # local (`ga`) but never verified the backward pass — that dead code is
    # removed. Add a check_symbolic_backward call if gradients should be
    # tested here.