def test_update_biases_decreases_cost(self):
    """Several bias-only gradient-descent steps should lower the quadratic cost."""
    cost_func = QuadraticCost(self.nnet)
    initial_cost = cost_func.get_cost(self.examples)

    grad_calc = BackPropagationBasedCalculator(data_src=self.examples,
                                               cost_function=cost_func,
                                               neural_net=self.nnet)

    # Only biases are updated here; the weight gradient is computed but
    # deliberately discarded so the test isolates the bias update.
    for _ in range(5):
        _w_grad, b_grad = grad_calc.compute_gradients()
        self.grad_descent.update_biases(bias_gradient=b_grad)

    final_cost = cost_func.get_cost(self.examples)
    self.assertLess(final_cost, initial_cost)
# Example 2
 def training_epoch(self, data_src):
     """Run a single gradient-descent iteration over the given data.

     :param data_src: an instance of a DataSource subclass
     :return: None
     """
     gradient_calculator = BackPropagationBasedCalculator(
         data_src=data_src,
         neural_net=self._nnet,
         cost_function=self._cost_function)

     weight_grad, bias_grad = gradient_calculator.compute_gradients()
     self.update_weights(weight_gradient=weight_grad)
     self.update_biases(bias_gradient=bias_grad)
# Example 3
    def test_compute_gradients_with_cross_entropy_cost(self):
        """Backprop gradients must agree with numerical ones for cross-entropy."""
        net = NetFactory.create_neural_net(sizes=[4, 2, 10])
        net.randomize_parameters()

        cost = cost_functions.CrossEntropyCost(neural_net=net)
        examples = helpers.generate_random_examples(10, 4, 10)

        analytic = BackPropagationBasedCalculator(
            data_src=PreloadSource(examples),
            neural_net=net,
            cost_function=cost)
        numerical = NumericalCalculator(
            data_src=PreloadSource(examples),
            neural_net=net,
            cost_function=cost)

        analytic_w, analytic_b = analytic.compute_gradients()
        numerical_w, numerical_b = numerical.compute_gradients()

        self.compare_grads(grad1=analytic_w, grad2=numerical_w)
        self.compare_grads(grad1=analytic_b, grad2=numerical_b)
# Example 4
    def test_that_returned_type_is_array(self):
        """compute_gradients must return lists of numpy arrays (one per layer)."""
        net = NetFactory.create_neural_net(sizes=[2, 1, 2])
        cost = cost_functions.QuadraticCost(neural_net=net)

        features = np.array([5, 2], float)
        targets = np.array([0.25, 0], float)
        source = PreloadSource(([features], [targets]))

        calc = BackPropagationBasedCalculator(
            data_src=source,
            neural_net=net,
            cost_function=cost)
        w_grad, b_grad = calc.compute_gradients()

        # Same assertions as spelled out one-by-one: each gradient is a list
        # whose first two entries are ndarrays.
        for grad in (w_grad, b_grad):
            self.assertIsInstance(grad, list)
            self.assertIsInstance(grad[0], np.ndarray)
            self.assertIsInstance(grad[1], np.ndarray)
    def test_update_with_multiple_examples(self):
        """Full weight+bias descent steps on two examples should lower the cost."""
        self.nnet.randomize_parameters()

        inputs = [np.array([5, 2], float),
                  np.array([5, 22], float)]
        targets = [np.array([0.25, 0, 1], float),
                   np.array([0.5, 1, 0], float)]
        self.examples = PreloadSource((inputs, targets))

        cost_func = QuadraticCost(self.nnet)
        initial_cost = cost_func.get_cost(self.examples)

        calc = BackPropagationBasedCalculator(data_src=self.examples,
                                              cost_function=cost_func,
                                              neural_net=self.nnet)
        for _ in range(10):
            w_grad, b_grad = calc.compute_gradients()
            self.grad_descent.update_weights(weight_gradient=w_grad)
            self.grad_descent.update_biases(bias_gradient=b_grad)

        self.assertLess(cost_func.get_cost(self.examples), initial_cost)
# Example 6
    def test_with_rectifer_activation_and_quadratic_cost(self):
        """Backprop matches numerical gradients with Rectifier layers + quadratic cost."""
        # NOTE(review): "rectifer" in the method name looks like a typo for
        # "rectifier"; kept as-is so test discovery and references stay stable.
        net = NetFactory.create_neural_net(
            sizes=[4, 2, 10],
            hidden_layer_activation=activation_functions.Rectifier,
            output_layer_activation=activation_functions.Rectifier)
        net.randomize_parameters()

        cost = cost_functions.QuadraticCost(neural_net=net)
        examples = helpers.generate_random_examples(10, 4, 10)

        analytic = BackPropagationBasedCalculator(
            data_src=PreloadSource(examples),
            neural_net=net,
            cost_function=cost)
        numerical = NumericalCalculator(
            data_src=PreloadSource(examples),
            neural_net=net,
            cost_function=cost)

        analytic_w, analytic_b = analytic.compute_gradients()
        numerical_w, numerical_b = numerical.compute_gradients()

        self.compare_grads(grad1=analytic_w, grad2=numerical_w)
        self.compare_grads(grad1=analytic_b, grad2=numerical_b)
# Example 7
    def test_with_regularized_quadratic_loss(self):
        """Backprop matches numerical gradients for L2-regularized quadratic cost."""
        net = NetFactory.create_neural_net(sizes=[4, 2, 10])
        net.randomize_parameters()

        reglambda = 2.5  # L2 regularization strength
        cost = cost_functions.QuadraticCost(neural_net=net,
                                            l2_reg_term=reglambda)
        examples = helpers.generate_random_examples(10, 4, 10)

        analytic = BackPropagationBasedCalculator(
            data_src=PreloadSource(examples),
            neural_net=net,
            cost_function=cost)
        numerical = NumericalCalculator(
            data_src=PreloadSource(examples),
            neural_net=net,
            cost_function=cost)

        analytic_w, analytic_b = analytic.compute_gradients()
        numerical_w, numerical_b = numerical.compute_gradients()

        self.compare_grads(grad1=analytic_w, grad2=numerical_w)
        self.compare_grads(grad1=analytic_b, grad2=numerical_b)
# Example 8
def step(context):
    """Compute backprop gradients for the scenario context and stash them.

    The result of ``compute_gradients()`` is stored on
    ``context.back_prop_gradients`` for later assertion steps to inspect.
    """
    calculator = BackPropagationBasedCalculator(
        data_src=context.training_data,
        neural_net=context.nnet,
        cost_function=context.cost_function)
    context.back_prop_gradients = calculator.compute_gradients()