Example #1
    def test_train(self):
        np.random.seed(0)
        ws = list()
        ws.append(Variable(np.random.rand(2, 3)))
        ws.append(Variable(np.random.rand(3, 1)))

        data = Variable(np.array([[0, 0], [0, 1], [1, 0], [1, 1]]))
        target = Variable(np.array([[0], [1], [0], [1]]))

        expected = [
            6.210704901174886, 1.4434549528818301, 0.809644434846779,
            0.7585458232291216, 0.7437140298400363, 0.7316218334889659,
            0.7198939685708763, 0.708293823629362, 0.6967362454336858,
            0.6851547179015602
        ]
        log = []

        for i in range(10):
            # Forward pass: linear layer -> sigmoid -> linear layer.
            t = matmul(data, ws[0])
            t = sigmoid2(t)
            pred = matmul(t, ws[1])

            # Sum-of-squared-errors loss.
            diff = (pred - target)
            loss = sum(diff * diff)
            loss.backward()

            # Plain gradient descent with learning rate 0.1.
            for w in ws:
                w.data -= w.grad * 0.1
                w.cleargrad()
            log.append(loss.data)

        self.assertTrue(np.allclose(np.array(log), np.array(expected)))
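The test assumes a small autodiff framework in the style of DeZero: Variable wraps a NumPy array, matmul and sigmoid2 are differentiable functions, and sum here is the framework's reduction rather than Python's builtin. Since sigmoid2 is not defined in this listing, here is a minimal sketch of what such a function often looks like; the exp below is assumed to be the framework's differentiable exponential, and the real sigmoid2 may be implemented differently:

def sigmoid2(x):
    # Hypothetical sketch: a sigmoid composed from primitive differentiable
    # ops, so the framework can backpropagate through it automatically.
    return 1 / (1 + exp(-x))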
Example #2
    def test_forward(self):
        x = Variable(np.random.rand(10))
        y = F.sum(x)
        expected = np.sum(x.data)
        self.assertTrue(np.allclose(y.data, expected))
Example #3
    def test_backward3(self):
        x_data = np.random.rand(2, 3)
        f = lambda x: F.sum(x, axis=1)
        check_backward(f, x_data, testcase=self)
Example #4
    def test_backward3(self):
        x_data = np.random.rand(10, 20, 20)
        f = lambda x: F.sum(x, axis=(0, 2), keepdims=True)
        check_backward(f, x_data, testcase=self)
Example #5
    def test_backward(self):
        x_data = np.random.rand(10)
        f = lambda x: F.sum(x)
        check_backward(f, x_data, testcase=self)
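Examples #3 through #5 lean on a check_backward helper that does not appear in this listing. Such helpers typically compare the backpropagated gradient against a central-difference numerical gradient. Below is a minimal sketch, assuming a DeZero-style Variable and that f maps a Variable to a Variable; the real helper and its tolerances may differ:

def check_backward(f, x_data, testcase, eps=1e-4, atol=1e-3):
    # Gradient from backpropagation.
    x = Variable(x_data)
    y = f(x)
    y.backward()  # assumed to seed the output gradient with ones
    bp_grad = x.grad if isinstance(x.grad, np.ndarray) else x.grad.data

    # Numerical gradient via central differences, one element at a time.
    num_grad = np.zeros_like(x_data)
    it = np.nditer(x_data, flags=['multi_index'])
    while not it.finished:
        idx = it.multi_index
        orig = x_data[idx]
        x_data[idx] = orig + eps
        y_plus = f(Variable(x_data)).data.sum()
        x_data[idx] = orig - eps
        y_minus = f(Variable(x_data)).data.sum()
        x_data[idx] = orig
        num_grad[idx] = (y_plus - y_minus) / (2 * eps)
        it.iternext()

    testcase.assertTrue(np.allclose(bp_grad, num_grad, atol=atol))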
Example #6
    def test_forward2(self):
        x = Variable(np.random.rand(10, 20, 30))
        y = F.sum(x, axis=1)
        expected = np.sum(x.data, axis=1)
        self.assertTrue(np.allclose(y.data, expected))
Example #7
def mean_squared_error(x, t):
    # Mean squared error: summed squared differences averaged over the
    # batch dimension N.
    diff = x - t
    total = sum(diff * diff)
    N = x.data.shape[0]
    return total / N
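This computes the standard mean squared error, (1/N) * sum((x - t)**2), dividing the summed squared differences by the batch size N. A quick usage sketch; the shapes and names here are illustrative only:

x = Variable(np.random.rand(4, 1))   # predictions for a batch of 4
t = Variable(np.random.rand(4, 1))   # matching targets
loss = mean_squared_error(x, t)      # scalar Variable
loss.backward()                      # gradients flow back to x (and t)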
Example #8
def training_loss(weights):
    # Training loss is the negative log-likelihood of the training labels.
    preds = logistic_predictions(weights, inputs)
    label_probabilities = preds * targets + (1 - preds) * (1 - targets)
    return -sum(log(label_probabilities))
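The logistic_predictions helper referenced above is not part of this listing. In the classic Autograd logistic-regression example, such a helper is a sigmoid applied to a linear score; here is a hedged sketch along those lines (the definition actually used by this code may differ):

def sigmoid(x):
    # Sigmoid expressed through tanh, which avoids overflow for large |x|.
    return 0.5 * (np.tanh(x / 2.0) + 1)

def logistic_predictions(weights, inputs):
    # Probability that each example's label is 1.
    return sigmoid(np.dot(inputs, weights))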
Example #9
    def logprob(model, x, t, noise_scale=0.1):
        # Negative Gaussian log-likelihood of the targets given the
        # model's predictions, with fixed observation noise.
        noise_scale = np.array(noise_scale)
        pred = model(x)
        logp = D.Normal(t, noise_scale).log_prob(pred)
        return -1 * F.sum(logp)
Example #10
def log_gaussian(model, scale):
    # Negative log-density of a zero-mean Gaussian prior evaluated at
    # every parameter of the model (an L2-style weight prior).
    t = 0
    normal = D.Normal(0, scale)
    for p in model.params():
        t += F.sum(normal.log_prob(p))
    return -1 * t
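Because log_gaussian returns a negative log-prior and logprob (Example #9) returns a negative log-likelihood, adding the two gives a MAP-style training objective. A sketch of that combination, where total_loss and prior_scale are illustrative names rather than part of the original code:

def total_loss(model, x, t, prior_scale=1.0):
    # Negative log-posterior (up to a constant): data term plus prior term.
    return logprob(model, x, t) + log_gaussian(model, prior_scale)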