Example #1
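    # Assumes "import numpy as np", "from numpy.linalg import norm", and the
    # project's mnist reader and NeuralNetwork class are in scope at module level.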
    def test_numerical_gradient_checking(self):
        label, image = next(mnist.read())
        ninput = [pixel / 255 for row in image for pixel in row]
        expected = [1 if i == label else 0 for i in range(10)]
        nnet = NeuralNetwork([784, 16, 16, 10])

        epsilon = 1e-5
        numgrad = [np.empty(wmatrix.shape) for wmatrix in nnet.weight]

        for k, wmatrix in enumerate(nnet.weight):
            for i, w in np.ndenumerate(wmatrix):
                wmatrix[i] = w - epsilon
                nnet.feedforward(ninput)
                a = nnet.get_error(expected)
                wmatrix[i] = w + epsilon
                nnet.feedforward(ninput)
                b = nnet.get_error(expected)
                # Central-difference estimate: (E(w + eps) - E(w - eps)) / (2 * eps)
                numgrad[k][i] = (b - a) / (2 * epsilon)
                wmatrix[i] = w
        error_gradient = nnet.get_error_gradient(expected)

        unit = lambda v: v / norm(v) if (v != 0).any() else np.zeros(v.shape)

        for k in range(len(nnet.weight)):
            ag = error_gradient[k]
            ng = numgrad[k]
            print(f"custom = {norm(unit(ag) - unit(ng))}")
            print(
                f"derived from cs231 = {norm(unit(ag) * norm(ng) - ng) / max(norm(ag), norm(ng))}"
            )
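For reference, the same central-difference idea detached from the project's NeuralNetwork class: perturb each weight by plus and minus epsilon, take the loss difference, divide by 2 * epsilon, and compare against the analytic gradient with the cs231n-style relative error. This is a minimal, self-contained sketch; numerical_gradient and the toy quadratic loss are illustrative names, not part of the repository.

import numpy as np

def numerical_gradient(loss_fn, w, epsilon=1e-5):
    # Central-difference estimate of d(loss)/dw_i for every entry of the flat vector w.
    grad = np.empty_like(w)
    for i in range(w.size):
        original = w[i]
        w[i] = original + epsilon
        above = loss_fn(w)
        w[i] = original - epsilon
        below = loss_fn(w)
        grad[i] = (above - below) / (2 * epsilon)  # note the parenthesised 2 * epsilon
        w[i] = original  # restore the weight before moving on
    return grad

# Toy check: the quadratic loss 0.5 * ||w||^2 has analytic gradient equal to w itself.
w = np.random.randn(5)
loss = lambda v: 0.5 * float(np.dot(v, v))
num = numerical_gradient(loss, w)
rel_error = np.linalg.norm(num - w) / max(np.linalg.norm(num), np.linalg.norm(w))
print(f"relative error = {rel_error}")  # should be on the order of 1e-10 or smaller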
Example #2
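# Assumes the project's mnist reader, NeuralNetwork, DLAYERS, and the
# save_params/load_params helpers are imported at module level.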
def backpropagation_main():
    label, image = next(mnist.read())
    ninput = [pixel / 255 for row in image for pixel in row]
    expected = [1 if i == label else 0 for i in range(10)]

    nnet = NeuralNetwork(DLAYERS, params=None)
    # nnet = NeuralNetwork(DLAYERS, params=load_params())
    for i in range(1000000000000):  # effectively "train until interrupted"
        try:
            guess = nnet.feedforward(ninput)
            cost = nnet.get_error(expected)
            print(f"[{i + 1}] cost = {cost}, guess = {guess}")
            nnet.backpropagate(expected)
        except KeyboardInterrupt:
            # Catch Ctrl+C anywhere in the iteration so the parameters below still get saved.
            break
    guess = nnet.feedforward(ninput)
    cost = nnet.get_error(expected)
    print(f"[{i + 1}] cost = {cost}")
    save_params(nnet.params)
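The example persists the learned weights through save_params and can resume from load_params (commented out above). Those helpers are not shown on this page; a minimal sketch of what they could look like, assuming nnet.params is a picklable object and using a hypothetical params.pkl path:

import pickle

PARAMS_FILE = "params.pkl"  # hypothetical filename, not taken from the project

def save_params(params, path=PARAMS_FILE):
    # Serialize whatever nnet.params holds (e.g. a list of weight matrices).
    with open(path, "wb") as f:
        pickle.dump(params, f)

def load_params(path=PARAMS_FILE):
    with open(path, "rb") as f:
        return pickle.load(f)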