Example #1
def test_backprop(self):
    X = np.array([[1, 2, 3], [2, 3, 4]])
    y = np.array([[0, 0, 1], [0, 0, 1]])
    res, a = neur.forward_prop(X, [self.theta, self.theta2])
    # analytic gradients from backprop vs. numerical gradients from the checker
    grads = neur.backprop(a, y, [self.theta, self.theta2], 0)
    grads_check = neur.gradient_check(X, y, [self.theta, self.theta2],
                                      neur.logistic_squared_cost_function)
    self.equalish(grads[0], grads_check[0])
    self.equalish(grads[1], grads_check[1])
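For context, `neur.gradient_check` is being used as a numerical reference for the analytic gradients returned by `neur.backprop`. A minimal sketch of such a checker, assuming the standard central-difference recipe and the `(X, y, thetas, cost_function)` call signature seen in these tests; the actual implementation in `neur` may differ:

import numpy as np

def gradient_check(X, y, thetas, cost_function, lamb=0, eps=1e-4):
    # Sketch only: approximate d(cost)/d(theta) for every weight matrix by
    # perturbing one entry at a time and taking the symmetric difference
    # quotient. Assumes the weight matrices are float arrays.
    numeric_grads = []
    for theta in thetas:
        grad = np.zeros_like(theta, dtype=float)
        for idx in np.ndindex(theta.shape):
            original = theta[idx]
            theta[idx] = original + eps
            cost_plus = cost_function(X, y, thetas, lamb)
            theta[idx] = original - eps
            cost_minus = cost_function(X, y, thetas, lamb)
            theta[idx] = original                      # restore the weight
            grad[idx] = (cost_plus - cost_minus) / (2 * eps)
        numeric_grads.append(grad)
    return numeric_grads

Under that reading, the assertions above simply compare each `grads[i]` from backprop against the corresponding finite-difference estimate `grads_check[i]`.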
Example #2
def test_gradient_check(self):
    X = np.array([[1, 2, 3], [2, 3, 4]])
    y = np.array([[0, 0, 1], [0, 0, 1]])
    res = neur.gradient_check(X, y, [self.theta, self.theta2],
                              neur.logistic_squared_cost_function)
    # expected numerical gradients for the first and second weight matrices
    self.equalish(
        res[0],
        np.array([[0.00562904, 0.00841451, 0.01404355, 0.01967259],
                  [-0.02588239, -0.03870536, -0.06458776, -0.09047015]]))
    self.equalish(
        res[1],
        np.array([[0.5341706, 0.3233084, 0.30720236],
                  [0.4745306, 0.28720531, 0.27289721],
                  [-0.45907194, -0.2778482, -0.26400618]]))
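The expected arrays are presumably the numerical gradients of `neur.logistic_squared_cost_function` with respect to `self.theta` (2x4) and `self.theta2` (3x3). The cost function itself is not shown here; below is a rough sketch of what a function with that name and the `(X, y, thetas, lamb)` signature used by the checker typically computes (sigmoid layers, squared-error cost, optional L2 penalty). The bias convention, normalization, and exact form in `neur` are assumptions:

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def logistic_squared_cost_function(X, y, thetas, lamb=0):
    # Sketch only: feed-forward with sigmoid units and a prepended bias
    # column at every layer, then mean squared error plus an optional
    # L2 penalty on the non-bias weights.
    m = X.shape[0]
    a = X
    for theta in thetas:
        a = np.hstack([np.ones((a.shape[0], 1)), a])   # prepend bias column
        a = sigmoid(a.dot(theta.T))                     # next layer's activations
    cost = np.sum((a - y) ** 2) / (2.0 * m)
    reg = (lamb / (2.0 * m)) * sum(np.sum(t[:, 1:] ** 2) for t in thetas)
    return cost + reg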
Example #3
def test_contractive_reg_gradient(self):
    X = np.array([[1, 0, 1], [1, 1, 0]])

    # cost function that exposes only the contractive penalty to the checker
    def cost_func(x, y, thetas, lamb):
        self.enc.encode_weights = thetas[0]
        out, a = self.enc.forward_prop(x)
        return self.enc.contractive_reg(a[1][:, 1:])

    self.enc.encode_weights = self.theta
    gradients = neur.gradient_check(X, X, [self.enc.encode_weights], cost_func)

    self.enc.encode_weights = self.theta
    out, a = self.enc.forward_prop(X)
    grad_new = self.enc.contractive_reg_gradient(a[1][:, 1:])  # drop the bias column of ones
    self.assertTrue(((grad_new - gradients) < 0.00035).all())
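The penalty being differentiated here is the contractive regularizer of Rifai et al. (2011): the squared Frobenius norm of the Jacobian of the hidden activations with respect to the input. For sigmoid hidden units this has a simple closed form; a minimal sketch under that assumption (in the test the encoder weights live in `self.enc.encode_weights` rather than being passed explicitly, and `neur` may sum or average over examples differently):

import numpy as np

def contractive_reg(hidden, weights):
    # Sketch only: `hidden` holds the sigmoid hidden activations (one row per
    # example, bias column already stripped, as in a[1][:, 1:] above) and
    # `weights` the encoder weights excluding the bias weights. For sigmoid
    # units dh_j/dx_i = h_j * (1 - h_j) * W_ji, so ||J||_F^2 factorizes:
    dh = hidden * (1.0 - hidden)                        # h * (1 - h) per unit
    return np.sum((dh ** 2) * np.sum(weights ** 2, axis=1))

`contractive_reg_gradient` would then be the analytic derivative of this penalty with respect to the encoder weights, which the test compares against the finite-difference estimate from `gradient_check`.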