import numpy as np  # needed for the hand-built label arrays in the cross-entropy tests

# `sn` is the tensor library under test, assumed to be imported under that alias.


def test_tensor_mul():
    # Scalar and broadcasted multiplication, backpropagated through a sum.
    a = 0.5 * sn.ones((3, 4), requires_grad=True)
    a = a * 2.0
    b = sn.random((3, 1))
    c = b * a
    d = sn.random((1, 4))
    d = d * c
    e = d.sum()
    e.backward()

def test_tensor_div():
    a = 0.5 / sn.ones((3, 4), requires_grad=True)
    a = a / 2.0
    b = sn.random((3, 1))
    c = b / a
    d = sn.random((1, 4))
    d = d / c
    e = d.sum()
    e.backward()

def test_cross_entropy_layer():
    x = sn.random((3, 4), requires_grad=True)
    # One-hot targets, one class per row.
    y = sn.zeros((3, 4))
    y.set_values(np.array([[0, 0, 0, 1], [0, 1, 0, 0], [1, 0, 0, 0]]))
    c = CrossEntropyLayer()
    loss = c(x, y)
    loss.backward()

def test_tensor_sub():
    a = sn.zeros((3, 4), requires_grad=True)
    a = a - 1.0
    a = 2.0 - a
    b = sn.ones((3, 1))
    b = b - a
    c = sn.ones((1, 4))
    c = c - b
    d = sn.random((3, 4))
    d = d - c
    e = d.sum()
    e.backward()

def test_tensor_add():
    a = sn.zeros((3, 4), requires_grad=True)
    a = a + 1.0
    a = 2.0 + a
    b = sn.ones((3, 1))
    b = b + a
    c = sn.ones((1, 4))
    c = c + b
    d = sn.random((3, 4))
    d = d + c
    e = d.sum()
    e.backward()

def test_run_on_gpu():
    # Requires a CUDA-capable device.
    x = sn.random((3, 4), device="cuda", requires_grad=True)
    y = x * 2 + 1
    z = sn.sum(y)
    z = z * 2
    z.backward()

def __init__(self, features, samples):
    # Fixture initializer (method of a test helper class): random inputs and targets
    # for a toy dataset, excluded from gradient tracking.
    self.initial_x = sn.random((samples, features), requires_grad=False)
    self.initial_y = sn.random((samples, 1), requires_grad=False)

def test_mse_layer():
    x = sn.random((10, 3), requires_grad=True)
    y = sn.random((10, 3))
    mse = MSELayer()
    loss = mse(x, y)
    loss.backward()

def test_relu_layer():
    # ReLULayer is assumed to exist alongside SigmoidLayer/TanhLayer (cf. sn.relu below).
    r = ReLULayer()
    x = sn.random((3, 4), requires_grad=True)
    x = r(x)
    loss = x.sum()
    loss.backward()

def test_tanh_layer():
    t = TanhLayer()
    x = sn.random((3, 4), requires_grad=True)
    x = t(x)
    loss = x.sum()
    loss.backward()

def test_sigmoid_layer():
    s = SigmoidLayer()
    x = sn.random((3, 4), requires_grad=True)
    x = s(x)
    loss = x.sum()
    loss.backward()

def test_linear_layer():
    l1 = LinearLayer(input_nodes=3, output_nodes=1)
    x = sn.random((1, 3))
    x = l1(x)
    loss = x.sum()
    loss.backward()

def test_tensor_cross_entropy():
    a = sn.random((3, 4), requires_grad=True)
    # One-hot targets, one class per row.
    b = sn.zeros((3, 4))
    b.set_values(np.array([[0, 0, 0, 1], [0, 1, 0, 0], [1, 0, 0, 0]]))
    c = sn.cross_entropy(a, b)
    c.backward()

def test_tensor_mse():
    a = sn.random((3, 4), requires_grad=True)
    b = sn.random((3, 4), requires_grad=True)
    c = sn.mse(a, b)
    c.backward()

def test_tensor_relu():
    a = sn.random((3, 4), requires_grad=True)
    b = sn.relu(a)
    b.set_retain_grad()  # keep the gradient on this intermediate tensor
    c = b.sum()
    c.backward()
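
# Illustrative sketch (not from the original suite): composes the pieces exercised above
# into one forward/backward pass. It relies only on APIs already used in these tests
# (LinearLayer, SigmoidLayer, MSELayer, sn.random, backward); the name and shapes are
# assumptions chosen for the example.
def test_small_network_sketch():
    x = sn.random((8, 3), requires_grad=True)
    y = sn.random((8, 1))
    l1 = LinearLayer(input_nodes=3, output_nodes=1)
    act = SigmoidLayer()
    mse = MSELayer()
    out = act(l1(x))
    loss = mse(out, y)
    loss.backward()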