Example #1
    def testSaveAndLoad(self):
        nn = neuralnet.NeuralNet(2, [2, 2], neuralnet.Sigmoid())
        nn.layers[0][0].weights = [0.01, 0.02]
        nn.layers[0][0].bias = 0.03
        nn.layers[0][1].weights = [0.04, 0.05]
        nn.layers[0][1].bias = 0.06
        nn.layers[1][0].weights = [0.07, 0.08]
        nn.layers[1][0].bias = 0.09
        nn.layers[1][1].weights = [0.10, 0.11]
        nn.layers[1][1].bias = 0.12

        tmpfile = tempfile.mkstemp()[1]
        nn.save(tmpfile)

        nn2 = neuralnet.NeuralNet(1, [1, 1], neuralnet.ReLu())
        nn2.load(tmpfile)

        # This should give a more readable error when there are differences
        self.assertEqual(str(nn), str(nn2))

        # This ensures the previous test didn't miss anything important
        self.assertEqual(nn2.activation.name(), nn.activation.name())
        self.assertEqual(nn2.average_gradient, nn.average_gradient)
        for i in range(0, 2):
            for j in range(0, 2):
                for k in range(0, 2):
                    self.assertAlmostEqual(nn.layers[i][j].weights[k], nn2.layers[i][j].weights[k])
                self.assertAlmostEqual(nn.layers[i][j].bias, nn2.layers[i][j].bias)
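
Outside a test, the same round trip looks roughly like this (a minimal sketch that only assumes the constructor, save() and load() calls used above; the temporary-file handling is illustrative):

import os
import tempfile

nn = neuralnet.NeuralNet(2, [2, 2], neuralnet.Sigmoid())
path = tempfile.mkstemp()[1]
try:
    nn.save(path)
    copy = neuralnet.NeuralNet(2, [2, 2], neuralnet.Sigmoid())
    copy.load(path)  # as the test checks, this restores weights, biases and the activation
finally:
    os.remove(path)  # mkstemp() does not delete the file for us
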
Example #2
    def testSigmoid(self):
        sigmoid = neuralnet.Sigmoid()
        self.assertAlmostEqual(0.5, sigmoid.value(0))
        self.assertAlmostEqual(0.26894, sigmoid.value(-1), 4)
        self.assertAlmostEqual(0.73106, sigmoid.value(1), 4)
        self.assertAlmostEqual(0, sigmoid.value(-710))
        self.assertAlmostEqual(1, sigmoid.value(710))
        self.assertAlmostEqual(0, sigmoid.value(-1234567890))
        self.assertAlmostEqual(1, sigmoid.value(1234567890))
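
The large-magnitude assertions above (±710, ±1234567890) imply the implementation avoids overflowing math.exp(). A minimal sketch of a value() that passes these checks, assuming the usual logistic formula (the real neuralnet.Sigmoid may differ in detail):

import math

def sigmoid_value(x):
    # 1 / (1 + exp(-x)), branched on the sign so that exp() only ever sees
    # a non-positive argument and cannot overflow (math.exp(710) raises OverflowError).
    if x >= 0:
        return 1.0 / (1.0 + math.exp(-x))
    z = math.exp(x)
    return z / (1.0 + z)
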
Example #3
    def testTrainAndPredictOnXor(self):
        dataset = [
            # Both negatives
            ([-0.5, -0.5], 1),
            ([-0.5, -0.3], 1),
            ([-0.5, -0.2], 1),
            ([-0.3, -0.5], 1),
            ([-0.3, -0.3], 1),
            ([-0.3, -0.2], 1),
            ([-0.2, -0.5], 1),
            ([-0.2, -0.3], 1),
            ([-0.2, -0.2], 1),
            # First negative, second positive
            ([-0.5, 0.5], 0),
            ([-0.5, 0.3], 0),
            ([-0.5, 0.2], 0),
            ([-0.3, 0.5], 0),
            ([-0.3, 0.3], 0),
            ([-0.3, 0.2], 0),
            ([-0.2, 0.5], 0),
            ([-0.2, 0.3], 0),
            ([-0.2, 0.2], 0),
            # Both positives
            ([0.5, 0.5], 1),
            ([0.5, 0.3], 1),
            ([0.5, 0.2], 1),
            ([0.3, 0.5], 1),
            ([0.3, 0.3], 1),
            ([0.3, 0.2], 1),
            ([0.2, 0.5], 1),
            ([0.2, 0.3], 1),
            ([0.2, 0.2], 1),
            # First positive, second negative
            ([0.5, -0.5], 0),
            ([0.5, -0.3], 0),
            ([0.5, -0.2], 0),
            ([0.3, -0.5], 0),
            ([0.3, -0.3], 0),
            ([0.3, -0.2], 0),
            ([0.2, -0.5], 0),
            ([0.2, -0.3], 0),
            ([0.2, -0.2], 0),
        ]
        examples = [a[0] for a in dataset]
        labels = [a[1] for a in dataset]
        nn = neuralnet.NeuralNet(2, [2, 2], neuralnet.Sigmoid())
        nn.train(20*20*20, 10, 1.0, examples, labels)
        print("")
        print(nn.evaluate([0.4, 0.4]))   # 1
        print(nn.evaluate([-0.3, 0.3]))  # 0
        print(nn.evaluate([-0.3, -0.4])) # 1
        print(nn.evaluate([0.4, -0.4]))  # 0
        print("")
        self.assertEqual(1, nn.predict([0.4, 0.4]))
        self.assertEqual(0, nn.predict([-0.3, 0.3]))
        self.assertEqual(1, nn.predict([-0.3, -0.4]))
        self.assertEqual(0, nn.predict([0.4, -0.4]))
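
The hand-written dataset above follows a sign-based XOR rule: the label is 1 exactly when both coordinates have the same sign. An equivalent way to build the same 36 pairs (only the ordering differs):

values = [-0.5, -0.3, -0.2, 0.2, 0.3, 0.5]
dataset = [([x, y], 1 if x * y > 0 else 0) for x in values for y in values]
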
Example #4
    def testSigmoidDerivative(self):
        sigmoid = neuralnet.Sigmoid()
        self.assertAlmostEqual(0.19661, sigmoid.derivative(-1), 4)
        self.assertAlmostEqual(0.25, sigmoid.derivative(0))
        self.assertAlmostEqual(0.19661, sigmoid.derivative(1), 4)
        self.assertAlmostEqual(0, sigmoid.derivative(-710))
        self.assertAlmostEqual(0, sigmoid.derivative(710))
        self.assertAlmostEqual(0, sigmoid.derivative(-1234567890))
        self.assertAlmostEqual(0, sigmoid.derivative(1234567890))
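
These derivative assertions are consistent with the standard identity sigmoid'(x) = s * (1 - s): 0.25 at 0 and about 0.19661 at ±1. A sketch building on the sigmoid_value() shown after Example #2:

def sigmoid_derivative(x):
    # For |x| around 710 and beyond, s saturates to 0 or 1 and the
    # derivative vanishes, matching the assertions above.
    s = sigmoid_value(x)
    return s * (1.0 - s)
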
Example #5
    def testTrainOnSimpleExample(self):
        # Simple example that only depends on the first parameter.
        # A linear separation would be enough, but that simplicity has the benefit of fast training :-)
        """ Created with:
        import random
        for i in range(0, 5):
            print(random.random()*4, random.random()*5, 0)
        for i in range(0, 5):
            print(5+random.random()*4, random.random()*5, 1)
        """
        dataset = [
            ([2.7810836, 2.550537003],   0),
            ([1.465489372, 2.362125076], 0),
            ([3.396561688, 4.400293529], 0),
            ([1.38807019, 1.850220317],  0),
            ([3.06407232, 3.005305973],  0),
            ([7.627531214, 2.759262235], 1),
            ([5.332441248, 2.088626775], 1),
            ([6.922596716, 1.77106367],  1),
            ([8.675418651, -0.242068655],1),
            ([7.673756466, 3.508563011], 1),
        ]
        examples = [a[0] for a in dataset]
        labels = [a[1] for a in dataset]

        nn = neuralnet.NeuralNet(2, [2, 2], neuralnet.Sigmoid())
        # These empirically prove to be good parameters for training on this dataset
        nn.train(10*20*20, 1, 1.0, examples, labels)
        # TODO: This should work like this too:
        #nn.train(20*20, 10, 1.0, examples, labels)

        """
        print("")
        print(nn.evaluate([2, 5]))
        print(nn.evaluate([2.5, 2.5]))
        print(nn.evaluate([3, 0]))
        print(nn.evaluate([6, 0]))
        print(nn.evaluate([7.5, 2.5]))
        print(nn.evaluate([9, 5]))
        print("")
        """
        self.assertEqual(0, nn.predict([2, 5]))
        self.assertEqual(0, nn.predict([2.5, 2.5]))
        # This one usually fails pretty badly, which is not necessarily surprising: there is no training example close to it
        #self.assertEqual(0, nn.predict([3, 0]))
        self.assertEqual(1, nn.predict([6, 0]))
        self.assertEqual(1, nn.predict([7.5, 2.5]))
        self.assertEqual(1, nn.predict([9, 5]))
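
A quick sanity check one might append to this test after training, assuming predict() returns the winning class label as in the assertions above:

        correct = sum(1 for x, y in zip(examples, labels) if nn.predict(x) == y)
        print("Training-set accuracy: %d/%d" % (correct, len(examples)))
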
Example #6
def test_network(filename):
    test_labels, test_images, test_examples = load_data(
        TEST_LABELS, TEST_IMAGES, None)
    labels = set(test_labels)
    confusion = [[0] * len(labels) for i in range(len(labels))]
    nn = neuralnet.NeuralNet(784, [10],
                             neuralnet.Sigmoid(),
                             average_gradient=False)
    nn.load(filename)
    score = evaluate_network(nn,
                             test_examples,
                             test_labels,
                             test_images,
                             verbose=False,
                             confusion=confusion)
    print("Score: %s" % score)
    show_confusion(confusion)
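
show_confusion() is not part of this excerpt; a minimal sketch of what such a printer could look like for the square per-class matrix built above (the real function may format or orient it differently):

def show_confusion(confusion):
    size = len(confusion)
    print("      " + " ".join("%5d" % col for col in range(size)))
    for row_label, row in enumerate(confusion):
        print("%5d " % row_label + " ".join("%5d" % count for count in row))
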
Example #7
def reverse_evaluate(filename):
    """Run a trained network "in reverse": build a mirrored network whose
    layer sizes are the original ones reversed, copy each neuron's weight
    vector to the same index in the mirrored layer with its bias negated,
    then feed one one-hot vector per output class through the mirrored
    network to obtain a synthetic "typical" input for that class."""
    nn = neuralnet.NeuralNet(0, [], neuralnet.Sigmoid())
    nn.load(filename)
    inputs = len(nn.layers[-1])
    layers = [len(l) for l in reversed(nn.layers[:-1])]
    layers.append(len(nn.layers[0][0].weights))
    rev_nn = neuralnet.NeuralNet(inputs, layers, nn.activation)
    for li, l in enumerate(nn.layers):
        rl = rev_nn.layers[-li - 1]
        for ni, n in enumerate(l):
            rn = rl[ni]
            rn.weights = [0] * len(n.weights)
            for wi, w in enumerate(n.weights):
                rn.weights[wi] = w
            rn.bias = -n.bias
    results = []
    for i in range(inputs):
        inp = [0] * inputs
        inp[i] = 1
        result = rev_nn.evaluate(inp, for_training=False)
        results.append(result)
    return results
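
A hypothetical direct call (the file name is a placeholder): for the 784-input MNIST network of the previous example, each returned result is a 784-value vector, which the 'reverse' action below renders as a 28x28 image:

typical_inputs = reverse_evaluate("trained.net")  # placeholder path to a saved network
for cls, values in enumerate(typical_inputs):
    print("class %d -> %d reconstructed input values" % (cls, len(values)))
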
Example #8
        print("ERROR: %s" % error)
        print("")
    print("Syntaxe: %s <action> <file>" % sys.argv[0])
    print("  where action can be 'train' or 'test'")
    print("  for 'train', file is a configuration file")
    print("  for 'test', file is a network file")
    sys.exit(-1)


if __name__ == "__main__":
    if len(sys.argv) != 3:
        usage("Invalid number of arguments")

    if sys.argv[1] == 'train':
        train_network(sys.argv[2])
    elif sys.argv[1] == 'test':
        test_network(sys.argv[2])
    elif sys.argv[1] == 'reverse':
        results = reverse_evaluate(sys.argv[2])
        nn = neuralnet.NeuralNet(0, [], neuralnet.Sigmoid())
        nn.load(sys.argv[2])
        for i, r in enumerate(results):
            print("")
            print("This is a typical %s:" % i)
            img = mnist.Img(28, 28, r)
            print(mnist.image_to_string(img))
            print("Recognized as a %s: %s" % (nn.predict(
                r, for_training=False), nn.evaluate(r, for_training=False)))
    else:
        usage("Unknown action '%s'" % sys.argv[1])