Example #1
    ## =========== Part 1: Regularized Logistic Regression ============
    #  In this part, you are given a dataset with data points that are not
    #  linearly separable. However, you would still like to use logistic
    #  regression to classify the data points.
    #
    #  To do so, you introduce additional features -- in particular, you add
    #  polynomial features to the data matrix (similar to polynomial
    #  regression).
    #

    # Add Polynomial Features

    # Note that mapFeature also adds a column of ones for us, so the intercept
    # term is handled
    X = mapFeature(X, degree=6)
    y = np.array(y)

    # Initialize fitting parameters
    initial_theta = np.zeros(np.size(X[0]))

    # Set regularization parameter lambda to 1
    lamb = 1

    # Compute and display initial cost and gradient for regularized logistic
    # regression
    cost, grad = costFunctionReg(initial_theta, X, y, lamb)
    print('Cost at initial theta (zeros): %f' % cost)
    print('Gradient at initial theta (zeros):', grad)
    print()
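The mapFeature helper used above is not shown in these examples. As a minimal sketch (an assumption, not the original implementation), a polynomial feature mapping for a two-column input X with a leading column of ones might look like this:

    import numpy as np

    def mapFeature(X, degree=6):
        # Sketch (assumed): map two input features x1, x2 to every polynomial
        # term x1^i * x2^j with i + j <= degree, prepending a column of ones
        # so the intercept term is handled.
        x1, x2 = X[:, 0], X[:, 1]
        columns = [np.ones(x1.shape[0])]
        for total in range(1, degree + 1):
            for j in range(total + 1):
                columns.append((x1 ** (total - j)) * (x2 ** j))
        return np.column_stack(columns)

With degree=6 this produces 28 columns, so np.size(X[0]) above would yield 28 parameters to fit.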
Example #2
    ## =========== Part 1: Regularized Logistic Regression ============
    #  In this part, you are given a dataset with data points that are not
    #  linearly separable. However, you would still like to use logistic
    #  regression to classify the data points.
    #
    #  To do so, you introduce additional features -- in particular, you add
    #  polynomial features to the data matrix (similar to polynomial
    #  regression).
    #

    # Add Polynomial Features

    # Note that mapFeature also adds a column of ones for us, so the intercept
    # term is handled
    X_long = mapFeature(X, degree=6)
    y_long = np.array(y)

    # Initialize fitting parameters
    initial_theta = np.zeros(np.size(X_long[0]))

    # Set regularization parameter lambda to 0.1
    lamb = 0.1

    # Compute and display initial cost and gradient for regularized logistic
    # regression
    cost, grad = costFunctionReg(initial_theta, X_long, y_long, lamb)
    print('Cost at initial theta (zeros): %f' % cost)
    print('Gradient at initial theta (zeros):')
    print(grad)
    print()
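For reference, here is a sketch of what costFunctionReg could compute, assuming the standard regularized logistic regression cost in which the intercept parameter theta[0] is not regularized; the function name and argument order follow the calls above, but the body is an assumption:

    import numpy as np

    def sigmoid(z):
        return 1.0 / (1.0 + np.exp(-z))

    def costFunctionReg(theta, X, y, lamb):
        # Sketch (assumed): regularized logistic regression cost and gradient.
        m = y.size
        h = sigmoid(X.dot(theta))
        reg = (lamb / (2.0 * m)) * np.sum(theta[1:] ** 2)
        cost = (-1.0 / m) * (y.dot(np.log(h)) + (1 - y).dot(np.log(1 - h))) + reg
        grad = (1.0 / m) * X.T.dot(h - y)
        grad[1:] += (lamb / m) * theta[1:]  # the intercept term is not regularized
        return cost, grad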
Example #3
    ## ==================== Part 1: Plotting ====================
    #  We start the exercise by first plotting the data to understand the
    #  problem we are working with.

    print('Plotting data with + indicating (y = 1) examples and o indicating (y = 0) examples.\n')

    figure = plotData(X, y)

    ## ============ Part 2: Compute Cost and Gradient ============

    #  Set up the data matrix appropriately, and add ones for the intercept term
    m, n = np.shape(X)

    # Add intercept term to X
    X_array = mapFeature(X)
    y_array = np.array(y)

    # Initialize fitting parameters
    initial_theta = np.zeros(n + 1)

    # Compute and display initial cost and gradient
    cost, grad = costFunction(initial_theta, X_array, y_array)

    print('Cost at initial theta (zeros): %f' % cost)
    print('Gradient at initial theta (zeros):', grad)
    print()

    ## ============= Part 3: Optimizing  =============
    # From scipy.optimize, the minimize function offers functionality similar
    # to fminunc.
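A sketch of how scipy.optimize.minimize could stand in for fminunc here, assuming costFunction returns the pair (cost, grad) as the unpacking in Part 2 suggests; passing jac=True tells minimize that the objective returns the gradient along with the cost:

    from scipy.optimize import minimize

    # Sketch (assumed usage): reuse the analytic gradient via jac=True.
    res = minimize(costFunction, initial_theta, args=(X_array, y_array),
                   method='BFGS', jac=True, options={'maxiter': 400})
    theta, final_cost = res.x, res.fun
    print('Cost at theta found by minimize: %f' % final_cost)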