Example No. 1
    def test_sigmoid_prime(self):
        """Test the sigmoid_prime function for specific inputs."""
        m = np.array([[1, 2], [-3, 4]])
        m_c = m.copy()
        eps = 1e-6

        der = neural.sigmoid_prime(m)
        exp = (neural.sigmoid(m + eps) - neural.sigmoid(m - eps)) / (2 * eps)

        diff = np.sum(der - exp)
        self.assertAlmostEqual(diff, 0, delta=1e-6)

        m_unchanged = (m == m_c).all()
        self.assertTrue(m_unchanged)
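The test pins sigmoid_prime to a central finite difference of sigmoid but does not show the implementation. A minimal sketch that would satisfy it, assuming neural.sigmoid is the standard logistic function (an assumption; the tested module's code is not shown):

import numpy as np

def sigmoid(m):
    # standard logistic function, applied element-wise
    return 1.0 / (1.0 + np.exp(-m))

def sigmoid_prime(m):
    # analytic derivative of the logistic: s * (1 - s);
    # builds a new array, leaving the input unchanged as the test requires
    s = sigmoid(m)
    return s * (1.0 - s)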
Example No. 3
def feed_forward(X, weights):
    """
    1. calculate the dot product of X                 (N, 3)
       and the weights of the first layer  (2, 3) --> (N, 2)
    2. apply the sigmoid function on the result       (N, 2) return this
    3. append an extra 1 for the bias to the result   (N, 3)
    4. calculate the dot product of X
       and the weights of the second layer (1, 3) --> (N, 1)
    5. apply the sigmoid function on the result       (N, 1) return this
    6. return intermediate results of the sigmoids
    """
    d1 = np.dot(X, weights[0])
    output1 = sigmoid(d1)
    input2 = np.hstack([output1, np.ones((output1.shape[0], 1))])
    d2 = np.dot(input2, weights[1])
    output2 = sigmoid(d2)
    return output1, output2
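A quick shape check with toy data, assuming feed_forward and a sigmoid like the one above are in scope (the data here is illustrative, not from the original module):

import numpy as np

# toy batch of N = 4 samples: 2 features plus a trailing bias column of ones
X = np.hstack([np.random.rand(4, 2), np.ones((4, 1))])    # (4, 3)
weights = [np.random.rand(3, 2), np.random.rand(3, 1)]    # first layer, second layer

out1, out2 = feed_forward(X, weights)
print(out1.shape, out2.shape)  # (4, 2) (4, 1)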
Example No. 4
    def test_sigmoid(self):
        """Test sigmoid function for specific cases."""
        m = np.array([[3, 4], [-1, -2], [-3.5, 4.5]])
        m_c = m.copy()

        output = neural.sigmoid(m)
        self.assertEqual(output.shape, m_c.shape)

        for row in range(m_c.shape[0]):
            for col in range(m_c.shape[1]):
                self.assertEqual(output[row, col],
                                 1 / (1 + math.exp(-m_c[row, col])))

        self.assertTrue((m == m_c).all())
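The element-by-element loop can also be expressed as one vectorized comparison; a sketch, assuming the same neural module and numpy (np.testing replaces the unittest loop here):

import numpy as np

m = np.array([[3, 4], [-1, -2], [-3.5, 4.5]])
expected = 1.0 / (1.0 + np.exp(-m))  # vectorized logistic function
np.testing.assert_allclose(neural.sigmoid(m), expected, rtol=0, atol=1e-12)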
Example No. 5
    def test_signal_to_activation(self):
        """Test the activation from specific signal."""
        m = np.array([[1, 2], [-3, 4]])
        m_c = m.copy()

        act = neural.signal_to_activation(m)
        shape_ok = act.shape[0] == m.shape[0] + 1 and act.shape[1] == m.shape[1]
        bias_ok = (act[0, :] == np.ones((1, m.shape[1]))).all()
        nonbias_ok = (act[1:, :] == neural.sigmoid(m)).all()

        self.assertTrue(shape_ok)
        self.assertTrue(bias_ok)
        self.assertTrue(nonbias_ok)
        self.assertTrue((m == m_c).all())
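The assertions pin the behavior down completely: a bias row of ones on top, sigmoid activations below. A minimal sketch that matches them (an inference from the test, not the tested module's confirmed code):

import numpy as np

def sigmoid(m):
    return 1.0 / (1.0 + np.exp(-m))

def signal_to_activation(m):
    # prepend a row of ones (the bias unit) above the element-wise sigmoid
    return np.vstack([np.ones((1, m.shape[1])), sigmoid(m)])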
Example No. 8
import numpy as np
from neural_network import sigmoid, feed_forward, loss
from neural_network import X, get_weights

weights = get_weights()

# test for sigmoid function
a = np.array([-10.0, -1.0, 0.0, 1.0, 10.0])
expected = np.array([0.0, 0.27, 0.5, 0.73, 1.0])
assert np.all(sigmoid(a).round(2) == expected)

# test the weights
assert weights[0].shape == (3, 2)
assert weights[1].shape == (3, 1)

# test the feed-forward step
out1, out2 = feed_forward(X, weights)
assert out1.shape == (50, 2)
assert out2.shape == (50, 1)

# test the log-loss function
ytrue = np.array([0.0, 0.0, 1.0, 1.0])
ypred = np.array([0.01, 0.99, 0.01, 0.99])
expected = np.array([0.01, 4.61, 4.61, 0.01])
assert np.all(loss(ytrue, ypred).round(2) == expected)

# test the feed-forward step with values that give a known result
Xref = np.array([[1.0, 2.0, 1.0]])
wref = [
    np.array([[1.0, -1.0], [2.0, -2.0], [0.0, 0.0]]),
    np.array([[1.0], [-1.0], [0.5]])
]
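The loss asserts above imply an element-wise binary cross-entropy: the loss is 0.01 when the prediction matches the label closely and about 4.61 (= -log(0.01)) when it is badly wrong. A sketch consistent with those values (an inference from the test, not the module's confirmed code):

import numpy as np

def loss(ytrue, ypred):
    # element-wise binary cross-entropy: -[y*log(p) + (1 - y)*log(1 - p)]
    return -(ytrue * np.log(ypred) + (1.0 - ytrue) * np.log(1.0 - ypred))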
Example No. 9
    def test_sigmoid(self):
        z = neural_network.getZ(self.w, self.x, self.b)
        self.assertEqual(neural_network.sigmoid(z), 1 / (1 + np.exp(-4)))
        print("sigmoid")
Example No. 10
for epoch in range(epochMax):
    for i, example in enumerate(train_features):
        # Forward pass
        features = train_features[i]
        # print("X: " + str(features.shape))
        ground_truth = float(train_labels[i])
        outputs = []
        # Compute output
        for n in range(total_layer_count):
            if n == 0:  # Size hidden_node_count x 1
                product = np.transpose(weights[n]).dot(features)
            else:
                product = np.transpose(weights[n]).dot(outputs[n - 1])
            inputs = product + np.transpose(biases[n])
            inputs = np.transpose(inputs)
            inputs = neural_network.sigmoid(np.array(inputs))  # Activation (keep the result)
            outputs.append(inputs)
            # print("Outputs [" + str(n) + "]: " + str(outputs[n].shape))
        output = float(outputs[-1])  # Output of network
        # Calculate error
        error = ground_truth - output
        if i % 100 == 0:  # Print error in intervals
            print("Error: " + str(error))
        if -0.05 < error < 0.05:  # Stop training once low error achieved
            break
        # Backpropagation
        deltas = []
        # Calculate deltas
        for n in reversed(range(total_layer_count)):
            if n == hidden_layer_count:  # Scalar
                delta = error * (output * (1 - output))
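The snippet cuts off inside the backpropagation loop, right after the output-layer delta. For reference, the standard delta rules for a sigmoid network look like the sketch below (generic formulas with hypothetical names, not the continuation of the original code):

import numpy as np

def output_delta(error, output):
    # output-layer delta: error scaled by the sigmoid derivative y * (1 - y)
    return error * (output * (1 - output))

def hidden_delta(weights_next, delta_next, output_hidden):
    # propagate the next layer's delta back through its weights,
    # then scale by the local sigmoid derivative
    return weights_next.dot(delta_next) * (output_hidden * (1 - output_hidden))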