Example #1
    def test_plot_decision_boundary(self):
        # SELECT * FROM self.data WHERE col2 == 0
        negatives = self.data[self.data[:, 2] == 0]
        # SELECT * FROM self.data WHERE col2 == 1
        positives = self.data[self.data[:, 2] == 1]
        plt.xlabel("Microchip Test 1")
        plt.ylabel("Microchip Test 2")
        plt.scatter(negatives[:, 0],
                    negatives[:, 1],
                    c='y',
                    marker='o',
                    s=40,
                    linewidths=1,
                    label="y=0")
        plt.scatter(positives[:, 0],
                    positives[:, 1],
                    c='b',
                    marker='+',
                    s=40,
                    linewidths=2,
                    label="y=1")

        dimension = 6
        # Map the two raw features to all polynomial terms up to degree 6,
        # then prepend the intercept (bias) column of ones.
        X_mapped = map_feature(self.X[:, 0], self.X[:, 1], dimension)
        m, n = X_mapped.shape
        X_mapped = np.hstack((np.ones((m, 1)), X_mapped))
        theta = np.zeros((n + 1, 1))
        lamda = 1.0  # regularization strength ('lamda' avoids the Python keyword)
        theta_optimized, min_cost = regularized_gradient_descent(
            X_mapped, self.y, theta, lamda)

        # Evaluate the fitted hypothesis on a 50x50 grid; the 0.5 level set
        # of these probabilities is the decision boundary.
        x1 = np.linspace(-1, 1.5, 50)
        x2 = np.linspace(-1, 1.5, 50)

        X1, X2 = np.meshgrid(x1, x2)
        hypo = np.zeros((len(x1), len(x2)))
        for i in range(len(x1)):
            for j in range(len(x2)):
                mapped = map_feature(np.array([[X1[i, j]]]),
                                     np.array([[X2[i, j]]]), dimension)
                mapped = np.hstack((np.ones((1, 1)), mapped))
                hypo[i, j] = hypothesis(mapped, theta_optimized)[0]

        # plt.contour does not take a `label` kwarg, so attach the label to
        # the drawn boundary line explicitly for it to show in the legend.
        boundary = plt.contour(X1, X2, hypo, [0.5])
        boundary.collections[0].set_label('Decision Boundary')
        plt.legend()
        plt.show()
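
Both tests lean on helpers (map_feature, hypothesis) defined outside the snippet. The sketch below is an assumption reconstructed from how the tests call them: map_feature returns the degree-1 through degree-6 polynomial terms without an intercept column (the tests prepend the ones column themselves, and 27 mapped columns plus the bias match the 28 theta entries in Example #2), and hypothesis is the logistic sigmoid.

import numpy as np

def map_feature(x1, x2, degree):
    # All polynomial terms x1^(i-j) * x2^j for 1 <= i <= degree, 0 <= j <= i.
    # For degree=6 this yields 27 columns; the intercept is NOT included,
    # matching the np.hstack((np.ones(...), X_mapped)) step in the tests.
    x1 = np.asarray(x1, dtype=float).reshape(-1, 1)
    x2 = np.asarray(x2, dtype=float).reshape(-1, 1)
    cols = [(x1 ** (i - j)) * (x2 ** j)
            for i in range(1, degree + 1)
            for j in range(i + 1)]
    return np.hstack(cols)

def hypothesis(X, theta):
    # Logistic sigmoid of the linear score: P(y=1 | x) for each row of X.
    return 1.0 / (1.0 + np.exp(-X.dot(theta)))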
Example #2
    def test_regularized_gradient_descent(self):
        dimension = 6
        # Same setup as above: degree-6 polynomial features plus a
        # prepended intercept column of ones.
        X_mapped = map_feature(self.X[:, 0], self.X[:, 1], dimension)
        m, n = X_mapped.shape
        X_mapped = np.hstack((np.ones((m, 1)), X_mapped))
        theta = np.zeros((n + 1, 1))
        lamda = 1.0
        theta_optimized, min_cost = regularized_gradient_descent(
            X_mapped, self.y, theta, lamda)
        expected_theta_optimized = np.array([
            1.27268726, 0.62557024, 1.18096643, -2.01919814, -0.91761464,
            -1.43194196, 0.12375928, -0.36513066, -0.35703386, -0.17485797,
            -1.4584374, -0.05129691, -0.6160397, -0.27464158, -1.19282551,
            -0.24270352, -0.20570051, -0.04499796, -0.27782728, -0.29525866,
            -0.45613268, -1.0437783, 0.02762813, -0.29265655, 0.01543383,
            -0.32759297, -0.14389219, -0.92460139
        ])
        expected_min_cost = 0.5290027422883413
        # Optimized parameters should match to five decimal places and the
        # final cost to three.
        np.testing.assert_almost_equal(theta_optimized,
                                       expected_theta_optimized,
                                       decimal=5)
        self.assertAlmostEqual(min_cost, expected_min_cost, places=3)
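
regularized_gradient_descent is also external. Its name suggests batch gradient descent, but the tight tolerances (five decimals across 28 parameters) are typical of a quasi-Newton optimizer, so the sketch below uses scipy.optimize.minimize with the standard regularized logistic-regression cost and gradient. The optimizer choice is an assumption, not necessarily the original implementation.

import numpy as np
from scipy import optimize

def _cost_and_grad(theta, X, y, lamda):
    # Regularized logistic-regression cost and gradient; the intercept
    # term theta[0] is excluded from the L2 penalty.
    m = X.shape[0]
    theta = theta.reshape(-1, 1)
    y = y.reshape(-1, 1)
    h = 1.0 / (1.0 + np.exp(-X.dot(theta)))
    cost = (-(y * np.log(h) + (1 - y) * np.log(1 - h)).sum() / m
            + lamda / (2.0 * m) * (theta[1:] ** 2).sum())
    grad = X.T.dot(h - y) / m
    grad[1:] += (lamda / m) * theta[1:]
    return cost, grad.ravel()

def regularized_gradient_descent(X, y, theta, lamda):
    # jac=True tells minimize that _cost_and_grad returns (cost, gradient).
    result = optimize.minimize(_cost_and_grad, theta.ravel(),
                               args=(X, y, lamda), jac=True, method='BFGS')
    return result.x, result.fun

Returning result.x as a flat array matches the 1-D expected_theta_optimized that the test compares against, and with lamda = 1.0 the minimum should land near the expected_min_cost of roughly 0.529 quoted above.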