Code Example #1
File: test_multilayer.py  Project: spkanung/neural
import unittest

import numpy as np

# FullLayer, ReluLayer, SoftMaxLayer, CrossEntropyLayer and Sequential are the
# project's own layer/model classes; the exact import paths are assumed to
# follow the repository layout.


class TestMultiLayer(unittest.TestCase):
    """
    test a multilayer network (check gradients)
    """
    def setUp(self):
        """
        Set up a small sequential network
        """
        self.layer1 = FullLayer(5, 10)
        self.relu1 = ReluLayer()
        self.layer2 = FullLayer(10, 2)
        self.softmax = SoftMaxLayer()
        self.loss = CrossEntropyLayer()

        self.model = Sequential(
            (self.layer1,
             self.relu1,
             self.layer2,
             self.softmax),
            self.loss
            )

    def test_forward(self):
        """
        Test forward pass with some fake input
        """
        # make some fake input
        x = np.array([[0.5, 0.2, 0.1, 0.3, 0.7],
                      [0.3, 0.1, 0.05, 0.8, 0.9]])
        y = np.array([[0, 1],
                      [1, 0]])

        # test layers individually
        y1 = self.layer1.forward(x)
        relu = self.relu1.forward(y1)
        y2 = self.layer2.forward(relu)
        soft = self.softmax.forward(y2)
        loss = self.loss.forward(soft, y)

        # test sequential model
        y_model = self.model.forward(x, y)

        self.assertAlmostEqual(loss, y_model)

    def test_backward(self):
        """
        Test the backward function using the numerical gradient
        """
        # make some fake input
        x = np.array([[0.5, 0.2, 0.1, 0.3, 0.7],
                      [0.3, 0.1, 0.05, 0.8, 0.9]])
        y = np.array([[0, 1],
                      [1, 0]])

        out = self.model.forward(x, y)
        grads = self.model.backward()

        # test some gradients at the input
        h = 0.001
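        # Forward-difference check: nudge one entry of x by h and compare
        # (loss(x + h) - loss(x)) / h against the analytic gradient from backward().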
        for i in range(x.shape[0]):
            for j in range(x.shape[1]):
                new_x = np.copy(x)
                new_x[i, j] += h

                out2 = self.model.forward(new_x, y)
                
                diff = (out2 - out) / h
                print("######")
                print(diff)
                print(grads[i, j])
                print("######")
                self.assertTrue(np.abs(diff - grads[i, j]) < 0.001)
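
The check above uses a one-sided (forward) difference, whose error is O(h); a central difference is usually preferred for gradient checks because its error falls off as O(h^2). Below is a minimal, self-contained sketch of such a checker; the helper name numerical_gradient and the callable interface are illustrative assumptions, not part of the spkanung/neural project.

import numpy as np

def numerical_gradient(f, x, h=1e-5):
    """Central-difference estimate of df/dx for a scalar-valued f(x)."""
    grad = np.zeros_like(x, dtype=float)
    it = np.nditer(x, flags=['multi_index'])
    while not it.finished:
        idx = it.multi_index
        orig = x[idx]
        x[idx] = orig + h              # evaluate at x + h in this coordinate
        f_plus = f(x)
        x[idx] = orig - h              # evaluate at x - h in this coordinate
        f_minus = f(x)
        x[idx] = orig                  # restore the original value
        grad[idx] = (f_plus - f_minus) / (2.0 * h)
        it.iternext()
    return grad

# Possible use against the fixtures above:
#   num_grad = numerical_gradient(lambda inp: self.model.forward(inp, y), np.copy(x))
#   np.testing.assert_allclose(num_grad, grads, atol=1e-4)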
Code Example #2
                           ReluLayer(),
                           MaxPoolLayer(size=2),
                           FlattenLayer(),
                           FullLayer(n_i=12*12*32, n_o=7),
                           SoftMaxLayer()),
                   loss=CrossEntropyLayer())

myNet.load()
"""
pred = myNet.predict(val_x)
accuracy = np.mean(pred == val_y)
print('At learning rate = '+str(lr))
print('Validation Accuracy of Convolutional Neural Network = '+str(accuracy))
"""

forw = myNet.forward(test_x)
pred = myNet.predict(test_x)
accuracy = np.mean(pred == test_y)
print('At learning rate = '+str(lr))
print('Testing Accuracy of Convolutional Neural Network = '+str(accuracy))

ty = np.argmax(test_y, axis=1)
prediction = np.argmax(pred, axis=1)
### Confusion Matrix
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(ty, prediction)
print(cm)
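
# Illustrative addition (not part of the original script): per-class precision
# and recall can be read off the confusion matrix directly, since sklearn puts
# true labels on the rows and predicted labels on the columns.
with np.errstate(divide='ignore', invalid='ignore'):
    per_class_precision = np.diag(cm) / cm.sum(axis=0)
    per_class_recall = np.diag(cm) / cm.sum(axis=1)
print('Per-class precision: ' + str(per_class_precision))
print('Per-class recall: ' + str(per_class_recall))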

"""
###Precision Recall
from sklearn.metrics import precision_recall_curve