Example #1
def cost_func(t):
    # Wrapper returning only the cost (element [0] of the (cost, gradient) pair)
    return cfr.cost_function_reg(t, X, y, lmd)[0]
Example #2
def grad_func(t):
    # Wrapper returning only the gradient (element [1] of the (cost, gradient) pair)
    return cfr.cost_function_reg(t, X, y, lmd)[1]
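Both wrappers index into the value returned by cfr.cost_function_reg, which evidently yields a (cost, gradient) pair for regularized logistic regression, so that cost_func and grad_func can be handed to an optimizer such as scipy.optimize.fmin_bfgs as its f and fprime arguments. A pure-NumPy sketch of what such a function plausibly computes (the cfr module itself is not shown on this page, so treat the body below as an assumption; by convention the intercept term theta[0] is excluded from the penalty):

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def cost_function_reg(theta, X, y, lmd):
    # Sketch: J = (1/m) * sum(-y*log(h) - (1-y)*log(1-h))
    #             + (lmd / (2m)) * sum(theta[1:] ** 2)
    m = len(y)
    h = sigmoid(X @ theta)
    cost = (-y @ np.log(h) - (1 - y) @ np.log(1 - h)) / m
    cost += lmd / (2 * m) * np.sum(theta[1:] ** 2)
    grad = X.T @ (h - y) / m
    grad[1:] += lmd / m * theta[1:]  # theta[0] is not regularized
    return cost, grad

# At theta = 0 the hypothesis is 0.5 everywhere, so the cost is ln(2) ~ 0.693,
# matching the "Expected cost (approx): 0.693" printed in Example #3 below.
X = np.array([[1.0, 0.2], [1.0, -0.5], [1.0, 1.3]])
y = np.array([1.0, 0.0, 1.0])
cost, grad = cost_function_reg(np.zeros(2), X, y, 1.0)
print(round(cost, 3))  # 0.693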
Example #3

# Add polynomial features

# Note that mapFeature also adds a column of ones for us, so the intercept
# term is handled
X = mf.map_feature(X[:, 0], X[:, 1])

# Initialize fitting parameters
initial_theta = np.zeros(X.shape[1])

# Set regularization parameter lambda to 1
lmd = 1

# Compute and display initial cost and gradient for regularized logistic regression
cost, grad = cfr.cost_function_reg(initial_theta, X, y, lmd)

np.set_printoptions(formatter={'float': '{: 0.4f}\n'.format})
print('Cost at initial theta (zeros): {}'.format(cost))
print('Expected cost (approx): 0.693')
print(
    'Gradient at initial theta (zeros) - first five values only: \n{}'.format(
        grad[0:5]))
print(
    'Expected gradients (approx) - first five values only: \n 0.0085\n 0.0188\n 0.0001\n 0.0503\n 0.0115'
)

input('Program paused. Press ENTER to continue')  # raw_input in Python 2

# Compute and display cost and gradient with non-zero theta
test_theta = np.ones(X.shape[1])
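The comments in Example #3 note that map_feature expands the two raw test scores into polynomial terms (so a boundary that is linear in the mapped features can be nonlinear in the originals) and prepends the column of ones for the intercept. A minimal sketch of such a mapping; the degree-6 limit follows the usual course convention, but the exact module behind mf is an assumption here:

import numpy as np

def map_feature(x1, x2, degree=6):
    # All terms x1^(i-j) * x2^j for 0 <= j <= i <= degree,
    # starting with the i = 0 term, i.e. the column of ones.
    x1 = np.asarray(x1).ravel()
    x2 = np.asarray(x2).ravel()
    cols = []
    for i in range(degree + 1):
        for j in range(i + 1):
            cols.append((x1 ** (i - j)) * (x2 ** j))
    return np.stack(cols, axis=1)

# Two input features expand to 28 columns (intercept + 27 polynomial terms)
print(map_feature(np.array([0.5, -0.2]), np.array([0.1, 0.3])).shape)  # (2, 28)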
Example #4
def grad_func(t, X, y):
    # Optimizers pass theta as a flat 1-D array; this variant of
    # cost_function_reg expects a column vector, hence the reshape.
    t = t.reshape((len(t), 1))
    return cfr.cost_function_reg(t, X, y, lmd)[1]
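The reshape exists because SciPy's optimizers work with flat 1-D parameter vectors in every callback, while this cost_function_reg variant apparently operates on column vectors. A toy demonstration of that calling convention (hypothetical quadratic objective, not the course code):

import numpy as np
import scipy.optimize as opt

def toy_cost(t):
    return float(t @ t)  # t arrives with shape (n,), not (n, 1)

def toy_grad(t):
    return 2 * t  # the gradient must come back as a 1-D array too

res = opt.minimize(toy_cost, np.full(3, 5.0), jac=toy_grad, method='BFGS')
print(res.x)  # ~[0. 0. 0.]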
Example #5
    y = data[1]
    plot_data(X, y)
    plt.xlabel('Microchip Test 1')
    plt.ylabel('Microchip Test 2')
    plt.legend(["y = 1", "y = 0"])
    plt.pause(1.5)
    plt.close()

    # =========== Part 1: Regularized Logistic Regression ============
    X = map_feature(X[:, 0], X[:, 1])
    # Initialize fitting parameters
    initial_theta = np.zeros((X.shape[1], 1))
    # Set regularization parameter lambda to 1
    reg_lambda = 1
    # Compute and display initial cost and gradient for regularized logistic regression
    cost, grad = cost_function_reg(initial_theta, X, y, reg_lambda)
    print('Cost at initial theta (zeros): ', cost,
          '\nExpected cost (approx): 0.693\n')
    np.set_printoptions(suppress=True)
    print('Gradient at initial theta (zeros) - first five values only:\n',
          grad[0:5])
    print(
        '\nExpected gradients (approx) - first five values only:\n 0.0085\n 0.0188\n 0.0001\n 0.0503\n 0.0115\n'
    )
    print('\nProgram paused. Press enter to continue.\n')
    # pause_func()

    # Compute and display cost and gradient with all-ones theta and lambda = 10
    test_theta = np.ones((X.shape[1], 1))
    cost, grad = cost_function_reg(test_theta, X, y, 10)
    print('Cost at test theta (with lambda = 10): ', cost)
Example #6
def gradient(t):
    # Gradient-only wrapper over the (cost, gradient) return value
    return cost_function_reg(t, X, y, lamb)[1]

def cost_f(t):
    # Cost-only wrapper over the (cost, gradient) return value
    return cost_function_reg(t, X, y, lamb)[0]
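These one-line wrappers split the (cost, gradient) tuple for optimizers that take separate f and fprime callables. With scipy.optimize.minimize the splitting can be skipped entirely by passing jac=True, which tells SciPy that the objective itself returns (cost, gradient). A self-contained sketch, reusing the pure-NumPy cost_function_reg outline from the note after Example #2 (the data and names here are illustrative assumptions):

import numpy as np
import scipy.optimize as opt

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def cost_function_reg(theta, X, y, lamb):
    m = len(y)
    h = sigmoid(X @ theta)
    cost = (-y @ np.log(h) - (1 - y) @ np.log(1 - h)) / m \
        + lamb / (2 * m) * np.sum(theta[1:] ** 2)
    grad = X.T @ (h - y) / m
    grad[1:] += lamb / m * theta[1:]
    return cost, grad

rng = np.random.default_rng(0)
X = np.hstack([np.ones((20, 1)), rng.normal(size=(20, 2))])
y = (X[:, 1] + X[:, 2] > 0).astype(float)

# jac=True: minimize consumes the (cost, gradient) tuple directly,
# so no gradient/cost_f wrappers are needed.
res = opt.minimize(cost_function_reg, np.zeros(X.shape[1]),
                   args=(X, y, 1.0), jac=True, method='BFGS')
print(res.fun, res.x)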