def plotDecisionBoundary(theta, X, y):
    import numpy as np
    import matplotlib.pyplot as plt
    from ex2_logistic_regression.plotData import plotData
    plotData(X[:, 1:], y)

    m, n = np.shape(X)
    if n <= 3:
        # The boundary is the line where theta[0] + theta[1]*x1 + theta[2]*x2 = 0;
        # two endpoints are enough to draw it.
        x = np.array([X[:, 1].min(), X[:, 1].max()])
        y = (-1.0 / theta[2]) * (theta[1] * x + theta[0])

        plt.figure(1)
        plt.title('Decision boundary')
        plt.xlabel('x')
        plt.ylabel('y')
        plt.scatter(x, y, marker='o', color='k', s=10, label='point')
        plt.legend(loc='lower right')
        plt.plot(x, y)
        plt.show()
    else:
        from ex2_logistic_regression.mapFeature import mapFeature
        # Evaluate theta' * mapFeature(u, v) over a grid and draw the zero contour
        x = np.linspace(-1, 1.5, 50)
        y = np.linspace(-1, 1.5, 50)
        z = np.zeros(shape=(len(x), len(y)))
        for i in range(len(x)):
            for j in range(len(y)):
                z[i, j] = mapFeature([x[i]], [y[j]]).dot(theta)
        z = z.T
        c = plt.contour(x, y, z, levels=[0], origin='upper')
        c.collections[0].set_label('Decision Boundary')
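A hypothetical usage sketch for plotDecisionBoundary above: the synthetic data, the hand-picked theta, and the module path are assumptions; in practice theta would come from minimizing the logistic regression cost.

import numpy as np
from ex2_logistic_regression.plotDecisionBoundary import plotDecisionBoundary  # assumed module path

rng = np.random.default_rng(0)
features = rng.normal(size=(100, 2))
X = np.column_stack([np.ones(100), features])             # intercept column first
y = (features[:, 0] + features[:, 1] > 0).astype(float)   # toy 0/1 labels

theta = np.array([0.0, 1.0, 1.0])   # stand-in for a fitted parameter vector
plotDecisionBoundary(theta, X, y)   # n <= 3, so the straight-line branch is used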
    def test_plotData(self):
        from ex2_logistic_regression.plotData import plotData
        from utils import file_utils
        import matplotlib.pyplot as plt

        x, y = file_utils.read_csv_split_last_col(data_file_path)
        plotData(x, y)
        plt.title('Figure 3: Plot of training data')
        plt.xlabel('Microchip test 1')
        plt.ylabel('Microchip test 2')
        plt.show()
Example #3
    def test_plotData(self):
        from ex2_logistic_regression.plotData import plotData
        from utils import file_utils
        import matplotlib.pyplot as plt

        x, y = file_utils.read_csv_split_last_col(data_file_path)
        plotData(x, y)
        plt.title('Scatter plot of training data')
        plt.xlabel('Exam 1 score')
        plt.ylabel('Exam 2 score')
        plt.show()
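plotData itself does not appear on this page. A minimal sketch consistent with how it is called above (two feature columns, 0/1 labels, optional legend labels) could look like the following; the exact signature and the return value are assumptions.

import numpy as np
import matplotlib.pyplot as plt


def plotData(X, y, labels=('y = 1', 'y = 0')):
    # Scatter-plot the positive and negative examples of a two-feature dataset.
    y = np.asarray(y).ravel()
    pos, neg = (y == 1), (y == 0)
    plt.plot(X[pos, 0], X[pos, 1], 'k+', markersize=7, label=labels[0])
    plt.plot(X[neg, 0], X[neg, 1], 'ko', markerfacecolor='y', markersize=7, label=labels[1])
    plt.legend()
    return plt  # some callers above use the returned module directly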
Example #4
def visualizeBoundary(X, y, model):
    import numpy as np
    import matplotlib.pyplot as plt
    from ex6_support_vector_machines.plotData import plotData  # assumed module path
    plotData(X, y, ("Pos", "Neg"))
    # Make classification predictions over a grid of values
    x1plot = np.linspace(np.min(X[:, 0]), np.max(X[:, 0]), 100)
    x2plot = np.linspace(np.min(X[:, 1]), np.max(X[:, 1]), 100)
    X1, X2 = np.meshgrid(x1plot, x2plot)
    vals = np.zeros(X1.shape)
    for i in range(X1.shape[1]):
        this_X = np.hstack((X1[:, i:i + 1], X2[:, i:i + 1]))
        vals[:, i] = model.predict(this_X)

    # Plot the SVM boundary: predictions are 0/1, so draw the 0.5 contour
    plt.contour(X1, X2, vals, levels=[0.5])
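A hypothetical usage sketch for visualizeBoundary: the data file, the kernel parameters, and the use of scikit-learn's SVC (in place of the exercise's own svmTrain) are assumptions.

import matplotlib.pyplot as plt
from scipy import io
from sklearn.svm import SVC
from ex6_support_vector_machines.visualizeBoundary import visualizeBoundary  # assumed module path

mat = io.loadmat('ex6data2.mat')               # hypothetical data file with a non-linear boundary
X, y = mat['X'], mat['y'].ravel()

model = SVC(C=1.0, kernel='rbf', gamma=50.0)   # gamma = 1 / (2 * sigma^2) with sigma = 0.1
model.fit(X, y)

visualizeBoundary(X, y, model)
plt.show()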
Example #5
def plotDecisionBoundaryReg(theta, X, y):
    import numpy as np
    from ex2_logistic_regression.plotData import plotData      # assumed module paths for the
    from ex2_logistic_regression.mapFeature import mapFeature  # helpers this snippet relies on
    from ex2_logistic_regression.sigmoid import sigmoid
    plt = plotData(X, y, ('y = 1', 'y = 0'))
    # Evaluate the hypothesis over a grid and draw its 0.5 contour as the boundary
    x1_vec, x2_vec = np.meshgrid(np.linspace(-1, 1.5), np.linspace(-1, 1.5))
    hypothesis = sigmoid(np.dot(mapFeature(x1_vec.flatten(), x2_vec.flatten()), theta)).reshape(x1_vec.shape)
    plt.contour(x1_vec, x2_vec, hypothesis, levels=[0.5], colors='g')
    plt.show()
    return
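A hypothetical usage sketch for plotDecisionBoundaryReg: load a two-feature dataset, expand it with mapFeature, fit a regularized logistic regression with scipy.optimize, and pass the raw two-column data plus the fitted theta. The file name, the sigmoid import path, and the optimizer choice are assumptions.

import numpy as np
from scipy.optimize import minimize
from utils import file_utils
from ex2_logistic_regression.mapFeature import mapFeature
from ex2_logistic_regression.sigmoid import sigmoid             # assumed module path

x_raw, y = file_utils.read_csv_split_last_col('ex2data2.txt')   # hypothetical file name
y = np.asarray(y, dtype=float).ravel()
X = mapFeature(x_raw[:, 0], x_raw[:, 1])   # polynomial feature expansion
lam = 1.0                                  # regularization strength


def cost(theta):
    h = sigmoid(X.dot(theta))
    reg = (lam / (2.0 * len(y))) * np.sum(theta[1:] ** 2)
    return -np.mean(y * np.log(h) + (1 - y) * np.log(1 - h)) + reg


theta = minimize(cost, np.zeros(X.shape[1]), method='BFGS').x
plotDecisionBoundaryReg(theta, x_raw, y)   # plotDecisionBoundaryReg as defined above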
Example #6
from scipy import io
import matplotlib.pyplot as plt

from ex6_support_vector_machines.plotData import plotData  # assumed module path
from ex6_support_vector_machines.gaussianKernel import gaussianKernel
from ex6_support_vector_machines.visualizeBoundary import visualizeBoundary
from ex6_support_vector_machines.dataset3Params import dataset3Params

print('Loading and Visualizing Data ...\n')

# % Load from ex6data1:
# % You will have X, y in your environment
mat = io.loadmat('ex6data1.mat')
X, y = (
    mat['X'],
    mat['y'],
)

# % Plot training data
plotData(X, y, ("Pos", "Neg"))
plt.show()

# fprintf('Program paused. Press enter to continue.\n');
# pause;
#
# %% ==================== Part 2: Training Linear SVM ====================
# %  The following code will train a linear SVM on the dataset and plot the
# %  decision boundary learned.
# %
#
# % Load from ex6data1:
# % You will have X, y in your environment
# load('ex6data1.mat');
#
print('\nTraining Linear SVM ...\n')
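The excerpt ends before the training code itself; a minimal sketch of what Part 2 might look like, assuming scikit-learn's SVC stands in for the exercise's own svmTrain:

from sklearn.svm import SVC

C = 1.0                                  # the exercise also suggests trying C = 100
model = SVC(C=C, kernel='linear')
model.fit(X, y.ravel())

visualizeBoundary(X, y.ravel(), model)   # draw the learned linear boundary over the data
plt.show()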