Example #1
def step(context, seq_len):
    slen = int(seq_len)

    training_data = examples_drawn_from_distributions(
        number_of_examples=200, seq_len=slen, create_example=create_example)
    context.training_data = PreloadSource(training_data)

    test_data = examples_drawn_from_distributions(
        number_of_examples=50, seq_len=slen, create_example=create_example)
    context.test_data = PreloadSource(test_data)
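
The create_example callback above is passed through to the factory but not shown in this excerpt. A minimal sketch of what such a callback could look like, assuming the factory invokes it once per example with the requested sequence length (the shapes and the target rule here are illustrative assumptions, not the project's actual helper):

import numpy as np

def create_example(seq_len):
    # Hypothetical: a random input sequence paired with a scalar target.
    x = np.random.rand(seq_len)
    y = np.array([x.mean()], float)
    return x, y
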
Example #2
    def _load_mnist_examples(self):
        mnist.download_dataset()

        train_data = mnist.get_training_data()
        test_data = mnist.get_test_data()

        dataset_size = self._config['dataset_size']
        if dataset_size:
            train_size = dataset_size
            test_size = dataset_size
        else:
            train_size = len(train_data)
            test_size = len(test_data)

        self._data_src = PreloadSource(train_data[:train_size])
        self._test_data_src = PreloadSource(test_data[:test_size])
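
One detail worth noting about the truncation above: Python slicing never raises on an over-long bound, so a configured dataset_size larger than a split simply keeps the whole split:

small = [1, 2, 3]
print(small[:100])  # [1, 2, 3], no IndexError
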
Example #3
    def train(self, pixels_to_categories, nepochs=1):
        examples = self.prepare_train_examples(pixels_to_categories)
        cost_func = cost_functions.CrossEntropyCost(self._nnet)
        gd = gradient_descent.StochasticGradientDescent(
            self._nnet, cost_function=cost_func, learning_rate=0.1)
        self._nnet.randomize_parameters()
        gd.train(data_src=PreloadSource(examples), nepochs=nepochs)
Example #4
    def setUp(self):
        x = np.array([5, 2], float)
        y = np.array([0.25, 0, 1], float)
        self.examples = PreloadSource(([x], [y]))
        nnet = NetFactory.create_neural_net(sizes=[2, 3, 3])
        self.nnet = nnet
        self.Descent = GradientDescent
        self.cost_function = QuadraticCost(self.nnet)
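
Storing the GradientDescent class itself in self.Descent lets each test instantiate a fresh optimizer on demand. Judging by the constructor and train() calls in the other examples, a test body would presumably read something like:

gd = self.Descent(neural_net=self.nnet,
                  cost_function=self.cost_function)
gd.train(data_src=self.examples, nepochs=1)
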
Example #5
    def test_quadratic_cost(self):
        inputs = [np.array([0.7, 0.6, 0.1], float), np.array([1, 0, 0], float)]
        outputs = [np.array([0, 0.5], float), np.array([0, 0], float)]
        quadracost = cost_functions.QuadraticCost(neural_net=self.net)

        src = PreloadSource((inputs, outputs))
        c = quadracost.get_cost(data_src=src)

        self.assertAlmostEqual(c, 0.75 / 4.0, places=3)
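
The expected value 0.75 / 4.0 is consistent with self.net being a freshly created sigmoid net with zero-initialized parameters (compare Examples #7 and #15), which outputs [0.5, 0.5] for every input; the summed squared errors over the two examples are 0.25 + 0.5 = 0.75, divided by 2n = 4:

print(((0.5 - 0) ** 2 + (0.5 - 0.5) ** 2
       + (0.5 - 0) ** 2 + (0.5 - 0) ** 2) / 4)  # 0.1875 == 0.75 / 4
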
Example #6
import math

def squared_sin_data_set():
    def f(x):
        return math.sin(x)**2

    return PreloadSource(
        helpers.generate_data(f=f,
                              start_value=0,
                              end_value=3.14,
                              step_value=0.1))
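
helpers.generate_data is not shown in these excerpts. Since its result is handed straight to PreloadSource, and the other examples build PreloadSource from an (inputs, targets) pair of lists of one-element numpy arrays, a plausible sketch (an assumption, not the project's actual helper) is:

import numpy as np

def generate_data(f, start_value, end_value, step_value):
    # Sample f on a grid and return parallel input/target lists.
    xs, ys = [], []
    v = start_value
    while v <= end_value:
        xs.append(np.array([v], float))
        ys.append(np.array([f(v)], float))
        v += step_value
    return xs, ys
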
Example #7
    def test_get_cost_initial(self):
        nnet = NetFactory.create_neural_net(sizes=[1, 1, 1])

        xes = [np.array([-10], float), np.array([100], float)]
        ys = [np.array([0.5], float), np.array([0.75], float)]

        examples = PreloadSource((xes, ys))
        cost_func = cost_functions.QuadraticCost(nnet)
        cost = cost_func.get_cost(examples)
        self.assertAlmostEqual(cost, 1.0 / 64, places=4)
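
The expected value 1.0 / 64 checks out by hand if a freshly created net starts with zero weights and biases and sigmoid activations, which the hand-computed gradients in Example #15 below also suggest: every input then maps to sigmoid(0) = 0.5, so the quadratic cost over the two examples is

print(((0.5 - 0.5) ** 2 + (0.5 - 0.75) ** 2) / (2 * 2))  # 0.015625 == 1/64
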
Example #8
    def setUp(self):
        x = np.array([5, 2], float)
        y = np.array([0.25, 0, 1], float)
        self.examples = PreloadSource(([x], [y]))
        nnet = NetFactory.create_neural_net(sizes=[2, 3, 3])
        nnet.randomize_parameters()
        self.nnet = nnet
        cost_func = QuadraticCost(nnet)
        self.grad_descent = GradientDescent(neural_net=nnet,
                                            cost_function=cost_func)
Example #9
    def test_compute_gradients_with_cross_entropy_cost(self):
        nnet = NetFactory.create_neural_net(sizes=[4, 2, 10])
        nnet.randomize_parameters()

        cost_func = cost_functions.CrossEntropyCost(neural_net=nnet)
        examples = helpers.generate_random_examples(10, 4, 10)
        calculator = BackPropagationBasedCalculator(
            data_src=PreloadSource(examples),
            neural_net=nnet,
            cost_function=cost_func)
        numerical_calculator = NumericalCalculator(
            data_src=PreloadSource(examples),
            neural_net=nnet,
            cost_function=cost_func)
        w_grad1, b_grad1 = calculator.compute_gradients()
        w_grad2, b_grad2 = numerical_calculator.compute_gradients()

        self.compare_grads(grad1=w_grad1, grad2=w_grad2)
        self.compare_grads(grad1=b_grad1, grad2=b_grad2)
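
compare_grads is used throughout these tests but never shown. A plausible implementation (an assumption, not the project's actual helper) asserts layer-by-layer element-wise closeness, which is the standard way to check backpropagation against numerical differentiation:

import numpy as np

def compare_grads(self, grad1, grad2):
    # One gradient array per layer; each pair must match element-wise.
    self.assertEqual(len(grad1), len(grad2))
    for g1, g2 in zip(grad1, grad2):
        self.assertTrue(np.allclose(g1, g2, atol=1e-4))
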
Example #10
    def test_training_epoch_2_examples(self):
        cost_func = QuadraticCost(self.nnet)
        self.examples = PreloadSource(
            ([np.array([5, 2], float),
              np.array([5, 22], float)],
             [np.array([0.25, 0, 1], float),
              np.array([0.5, 1, 0], float)]))
        cost_before = cost_func.get_cost(self.examples)
        self.grad_descent.training_epoch(data_src=self.examples)
        cost_after = cost_func.get_cost(self.examples)
        self.assertLess(cost_after, cost_before)
Example #11
    def test_gives_correct_output_on_training_data(self):
        nnet = NetFactory.create_neural_net(sizes=[1, 1, 1])
        cost_func = QuadraticCost(neural_net=nnet)
        gd = GradientDescent(neural_net=nnet, cost_function=cost_func)

        xes = [np.array([-10], float), np.array([100], float)]
        ys = [np.array([0.5], float), np.array([0.75], float)]

        gd.train(data_src=PreloadSource((xes, ys)), nepochs=100)

        for i in range(len(xes)):
            res = nnet.feed(xes[i])
            self.assertAlmostEqual(res[0], ys[i][0], places=1)
Example #12
    def test_with_rectifer_activation_and_quadratic_cost(self):
        nnet = NetFactory.create_neural_net(
            sizes=[4, 2, 10],
            hidden_layer_activation=activation_functions.Rectifier,
            output_layer_activation=activation_functions.Rectifier)
        nnet.randomize_parameters()
        cost_func = cost_functions.QuadraticCost(neural_net=nnet)

        examples = helpers.generate_random_examples(10, 4, 10)
        calculator = BackPropagationBasedCalculator(
            data_src=PreloadSource(examples),
            neural_net=nnet,
            cost_function=cost_func)
        numerical_calculator = NumericalCalculator(
            data_src=PreloadSource(examples),
            neural_net=nnet,
            cost_function=cost_func)
        w_grad1, b_grad1 = calculator.compute_gradients()
        w_grad2, b_grad2 = numerical_calculator.compute_gradients()

        self.compare_grads(grad1=w_grad1, grad2=w_grad2)
        self.compare_grads(grad1=b_grad1, grad2=b_grad2)
Example #13
    def test_with_regularized_quadratic_loss(self):
        nnet = NetFactory.create_neural_net(sizes=[4, 2, 10])
        nnet.randomize_parameters()

        reglambda = 2.5
        cost_func = cost_functions.QuadraticCost(neural_net=nnet,
                                                 l2_reg_term=reglambda)
        examples = helpers.generate_random_examples(10, 4, 10)
        calculator = BackPropagationBasedCalculator(
            data_src=PreloadSource(examples),
            neural_net=nnet,
            cost_function=cost_func)
        numerical_calculator = NumericalCalculator(
            data_src=PreloadSource(examples),
            neural_net=nnet,
            cost_function=cost_func)

        w_grad1, b_grad1 = calculator.compute_gradients()
        w_grad2, b_grad2 = numerical_calculator.compute_gradients()

        self.compare_grads(grad1=w_grad1, grad2=w_grad2)
        self.compare_grads(grad1=b_grad1, grad2=b_grad2)
Example #14
    def test_costs_match(self):
        nnet = NetFactory.create_neural_net(sizes=[2, 3, 1, 10, 2])
        nnet.randomize_parameters()
        cost_func = CrossEntropyCost(nnet)

        X = [np.array([90, 23], float), np.array([0, 2], float)]
        Y = [np.array([0.4, 0.6], float), np.array([0.3, 0], float)]
        examples = PreloadSource((X, Y))
        c1 = cost_func.get_cost(examples)

        fname = os.path.join('test_temp', 'nets_params.json')
        nnet.save(dest_fname=fname)

        nnet = NeuralNet.create_from_file(fname)
        # Rebind the cost function to the reloaded net; the original
        # cost_func still references the pre-save instance, which would
        # make the comparison vacuous.
        cost_func = CrossEntropyCost(nnet)
        c2 = cost_func.get_cost(examples)
        self.assertAlmostEqual(c1, c2, places=4)
Example #15
    def test_back_propagation_slow(self):
        nnet = NetFactory.create_neural_net(sizes=[1, 1, 1])
        cost_func = QuadraticCost(neural_net=nnet)
        x = np.array([5], float)
        y = np.array([0.25], float)
        examples = ([x], [y])

        numerical = NumericalCalculator(data_src=PreloadSource(examples),
                                        neural_net=nnet,
                                        cost_function=cost_func)
        w_grad, b_grad = numerical.compute_gradients()

        w_grad_expected = [np.array([[0]], float), np.array([[1 / 32]], float)]
        b_grad_expected = [np.array([[0]], float), np.array([[1 / 16]], float)]

        self.compare_grads(w_grad, w_grad_expected)
        self.compare_grads(b_grad, b_grad_expected)
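
These expected values follow from a single backpropagation pass, again assuming zero-initialized parameters and sigmoid activations. Both activations equal sigmoid(0) = 0.5, the output delta is (a - y) * sigmoid'(z) = 0.25 * 0.25 = 1/16 (the layer-2 bias gradient), the layer-2 weight gradient is that delta times the hidden activation, and the layer-1 gradients vanish because the connecting weight is zero:

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

a1 = sigmoid(0)                       # hidden activation, 0.5
a2 = sigmoid(0)                       # output activation, 0.5
delta2 = (a2 - 0.25) * a2 * (1 - a2)
print(delta2)                         # 0.0625  == 1/16 (layer-2 bias grad)
print(delta2 * a1)                    # 0.03125 == 1/32 (layer-2 weight grad)
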
Example #16
    def test_back_propagation_slow_type_array(self):
        nnet = NetFactory.create_neural_net(sizes=[2, 1, 2])
        cost_func = QuadraticCost(neural_net=nnet)

        x = np.array([5, 2], float)
        y = np.array([0.25, 0], float)

        examples = ([x], [y])
        numerical = NumericalCalculator(data_src=PreloadSource(examples),
                                        neural_net=nnet,
                                        cost_function=cost_func)

        w_grad, b_grad = numerical.compute_gradients()
        self.assertIsInstance(w_grad, list)
        self.assertIsInstance(w_grad[0], np.ndarray)
        self.assertIsInstance(w_grad[1], np.ndarray)

        self.assertIsInstance(b_grad, list)
        self.assertIsInstance(b_grad[0], np.ndarray)
        self.assertIsInstance(b_grad[1], np.ndarray)
Example #17
    def test_gives_correct_output_for_unseen_data(self):
        nnet = NetFactory.create_neural_net(sizes=[1, 10, 1])
        cost_func = QuadraticCost(neural_net=nnet)
        gd = GradientDescent(neural_net=nnet, cost_function=cost_func)

        def parabola(x):
            return x**2

        examples = helpers.generate_data(f=parabola,
                                         start_value=-0.6,
                                         end_value=-0.4,
                                         step_value=0.005)

        gd.train(data_src=PreloadSource(examples), nepochs=10)

        xval = -0.5000125
        yval = parabola(xval)

        net_input = np.array([xval], float)
        output = nnet.feed(net_input)
        self.assertAlmostEqual(output[0], yval, places=1)
Example #18
    def test_returns_correct_gradient_shape(self):
        nnet = NetFactory.create_neural_net(sizes=[3, 2, 2, 5])
        cost_func = cost_functions.QuadraticCost(neural_net=nnet)

        x = np.array([5, 2, -0.5], float)
        y = np.array([0.25, 0, 0, 0.7, 0.2], float)
        examples = ([x], [y])
        numerical_calculator = NumericalCalculator(
            data_src=PreloadSource(examples),
            neural_net=nnet,
            cost_function=cost_func)
        w_grad, b_grad = numerical_calculator.compute_gradients()
        self.assertEqual(len(w_grad), 3)
        self.assertEqual(len(b_grad), 3)
        self.assertTupleEqual(w_grad[0].shape, (2, 3))
        self.assertTupleEqual(w_grad[1].shape, (2, 2))
        self.assertTupleEqual(w_grad[2].shape, (5, 2))

        self.assertTupleEqual(b_grad[0].shape, (2, ))
        self.assertTupleEqual(b_grad[1].shape, (2, ))
        self.assertTupleEqual(b_grad[2].shape, (5, ))
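
The asserted shapes follow directly from the layer sizes: the weight matrix connecting layer l to layer l + 1 has shape (sizes[l + 1], sizes[l]), and each bias vector has shape (sizes[l + 1],):

sizes = [3, 2, 2, 5]
print([(sizes[i + 1], sizes[i]) for i in range(len(sizes) - 1)])
# [(2, 3), (2, 2), (5, 2)]
print([(sizes[i + 1],) for i in range(len(sizes) - 1)])
# [(2,), (2,), (5,)]
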
Example #19
    def test_that_returned_type_is_array(self):
        nnet = NetFactory.create_neural_net(sizes=[2, 1, 2])
        cost_func = cost_functions.QuadraticCost(neural_net=nnet)

        x = np.array([5, 2], float)
        y = np.array([0.25, 0], float)

        examples = ([x], [y])
        calculator = BackPropagationBasedCalculator(
            data_src=PreloadSource(examples),
            neural_net=nnet,
            cost_function=cost_func)

        w_grad, b_grad = calculator.compute_gradients()
        self.assertIsInstance(w_grad, list)
        self.assertIsInstance(w_grad[0], np.ndarray)
        self.assertIsInstance(w_grad[1], np.ndarray)

        self.assertIsInstance(b_grad, list)
        self.assertIsInstance(b_grad[0], np.ndarray)
        self.assertIsInstance(b_grad[1], np.ndarray)
Example #20
    def test_update_with_multiple_examples(self):
        self.nnet.randomize_parameters()

        self.examples = PreloadSource(
            ([np.array([5, 2], float),
              np.array([5, 22], float)],
             [np.array([0.25, 0, 1], float),
              np.array([0.5, 1, 0], float)]))

        cost_func = QuadraticCost(self.nnet)
        cost_before = cost_func.get_cost(self.examples)
        calculator = BackPropagationBasedCalculator(data_src=self.examples,
                                                    cost_function=cost_func,
                                                    neural_net=self.nnet)
        for i in range(10):
            w_grad, b_grad = calculator.compute_gradients()
            self.grad_descent.update_weights(weight_gradient=w_grad)
            self.grad_descent.update_biases(bias_gradient=b_grad)

        cost_after = cost_func.get_cost(self.examples)
        self.assertLess(cost_after, cost_before)
Example #21
def step(context):
    mnist.download_dataset()
    context.training_data = PreloadSource(mnist.get_training_data())
    context.test_data = PreloadSource(mnist.get_test_data())