Example #1
    def test_gradient_descent_3_parameters(self):
        """
        Check gradient descent convergence with three parameters
        (two features plus a bias term).

        Test based on Assignment 1 of the free online
        Stanford Machine Learning online course (dataset ex1data2.txt).
        Expected theta values come from the course's reference solution.
        """
        dataset = loader.load(self.relative_to_base("datasets/ex1data2.txt"),
                              has_ids=False,
                              has_header=False,
                              has_labels=True,
                              delimiter=",")
        # Normalization is required for the large learning rate below to
        # converge in only 50 iterations.
        dataset.normalize_features()
        # Constant bias column so the intercept is learned like any weight.
        dataset.set_column("bias", pd.Series([1] * dataset.num_samples()))

        learning_rate = 1.0
        # Renamed from `iter` to avoid shadowing the builtin iter().
        num_iterations = 50

        initial_theta = pd.Series({0: 0, 1: 0, "bias": 0})
        theta = optimize.gradient_descent(dataset,
                                          initial_theta,
                                          learning_rate,
                                          iterations=num_iterations)

        assert_that(
            theta,
            equals_series(
                {
                    0: 110631.050279,
                    1: -6649.474271,
                    "bias": 340412.659574
                },
                places=6))
Example #2
    def test_gradient_descent_2_parameters(self):
        """
        Check gradient descent convergence with two parameters
        (one feature plus a bias term).

        Test based on Assignment 1 of the free online
        Stanford Machine Learning online course (dataset ex1data1.txt).

        For population = 35,000, we predict a profit of 4519.767868
        For population = 70,000, we predict a profit of 45342.450129

        Final cost: 4.483388
        """
        dataset = loader.load(self.relative_to_base("datasets/ex1data1.txt"),
                              has_ids=False, has_header=False, has_labels=True,
                              delimiter=",")
        # Constant bias column so the intercept is learned like any weight.
        dataset.set_column("bias", pd.Series([1] * dataset.num_samples()))

        learning_rate = 0.01
        # Renamed from `iter` to avoid shadowing the builtin iter().
        num_iterations = 100

        initial_theta = pd.Series({0: 0, "bias": 0})
        theta = optimize.gradient_descent(dataset, initial_theta,
                                          learning_rate,
                                          iterations=num_iterations)

        # Expected values from the course's reference solution.
        assert_that(theta, equals_series({0: 0.859582,
                                         "bias": -0.576556},
                                         places=6))
Example #3
    def test_gradient_descent_2_parameters(self):
        """
        Check gradient descent convergence with two parameters
        (one feature plus a bias term).

        Test based on Assignment 1 of the free online
        Stanford Machine Learning online course (dataset ex1data1.txt).

        For population = 35,000, we predict a profit of 4519.767868
        For population = 70,000, we predict a profit of 45342.450129

        Final cost: 4.483388
        """
        dataset = loader.load(self.relative_to_base("datasets/ex1data1.txt"),
                              has_ids=False,
                              has_header=False,
                              has_labels=True,
                              delimiter=",")
        # Constant bias column so the intercept is learned like any weight.
        dataset.set_column("bias", pd.Series([1] * dataset.num_samples()))

        learning_rate = 0.01
        # Renamed from `iter` to avoid shadowing the builtin iter().
        num_iterations = 100

        initial_theta = pd.Series({0: 0, "bias": 0})
        theta = optimize.gradient_descent(dataset,
                                          initial_theta,
                                          learning_rate,
                                          iterations=num_iterations)

        # Expected values from the course's reference solution.
        assert_that(theta,
                    equals_series({
                        0: 0.859582,
                        "bias": -0.576556
                    }, places=6))
Example #4
    def test_gradient_descent_3_parameters(self):
        """
        Check gradient descent convergence with three parameters
        (two features plus a bias term).

        Test based on Assignment 1 of the free online
        Stanford Machine Learning online course (dataset ex1data2.txt).
        Expected theta values come from the course's reference solution.
        """
        dataset = loader.load(self.relative_to_base("datasets/ex1data2.txt"),
                              has_ids=False, has_header=False, has_labels=True,
                              delimiter=",")
        # Normalization is required for the large learning rate below to
        # converge in only 50 iterations.
        dataset.normalize_features()
        # Constant bias column so the intercept is learned like any weight.
        dataset.set_column("bias", pd.Series([1] * dataset.num_samples()))

        learning_rate = 1.0
        # Renamed from `iter` to avoid shadowing the builtin iter().
        num_iterations = 50

        initial_theta = pd.Series({0: 0, 1: 0, "bias": 0})
        theta = optimize.gradient_descent(dataset, initial_theta,
                                          learning_rate,
                                          iterations=num_iterations)

        assert_that(theta, equals_series({0: 110631.050279,
                                          1: -6649.474271,
                                         "bias": 340412.659574},
                                         places=6))