Example #1
def calc_grad(theta, X, y, lambda_):
    m = len(y)
    h = ex2.sigmoid(X.dot(theta))  # hypothesis h = g(X @ theta)
    error = h - y
    r = lambda_ / m * theta        # regularization term for every parameter
    r[0] = 0                       # the intercept theta[0] is not regularized
    grad = X.T.dot(error) / m + r
    return grad
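All five examples call a sigmoid helper without defining it (here via an ex2 module). A minimal sketch of that assumed helper, using the standard logistic function:

import numpy as np

def sigmoid(z):
    # logistic function g(z) = 1 / (1 + e^(-z))
    return 1.0 / (1.0 + np.exp(-z))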
Example #2
def cost_reg(theta, x, y, lambda_):
    "Compute cost for logistic regression with regularization"
    m = len(x)
    theta = np.matrix(theta)
    x = np.matrix(x)
    y = np.matrix(y)

    h = sigmoid(x * theta.T)

    # element-wise cross-entropy: -y*log(h) - (1 - y)*log(1 - h)
    log_l = np.multiply(-y, np.log(h)) - np.multiply(1 - y, np.log(1 - h))

    # regularization term lambda / (2m) * sum(theta_j^2), skipping theta_0
    reg = (lambda_ / (2 * m)) * np.power(theta[:, 1:theta.shape[1]], 2).sum()
    return log_l.sum() / m + reg
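A quick sanity check on toy data (the data shapes and the sigmoid helper above are assumptions, not part of the original snippet): with theta = 0 every prediction is 0.5 and the regularization term vanishes, so the cost should be exactly log(2) ≈ 0.6931.

import numpy as np

X = np.c_[np.ones(4), np.arange(4.0)]   # 4 samples: intercept column + one feature
y = np.array([[0.], [0.], [1.], [1.]])
theta0 = np.zeros(2)
print(cost_reg(theta0, X, y, 1.0))      # -> 0.6931..., i.e. log(2)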
Example #3
def cost_function_reg(theta, X, y, Lambda):
    """Compute cost and gradient for logistic regression with regularization

    Compute the cost of using theta as the parameter for regularized logistic
    regression and the gradient of the cost w.r.t. the parameters.
    """
    # Initialize some useful values
    m, n = X.shape
    theta = theta.reshape(n, 1)
    y = y.reshape(m, 1)  # guard against a flat y broadcasting h - y to (m, m)

    h = sigmoid(X.dot(theta))
    # cross-entropy plus Lambda/(2m) * sum(theta_j^2), skipping the intercept
    J = np.sum(-y*np.log(h) - (1-y)*np.log(1-h))/m + Lambda*np.sum(theta[1:]**2)/(2*m)
    grad = X.T.dot(h - y)/m
    grad[1:] = grad[1:] + Lambda*theta[1:]/m  # no regularization on the intercept gradient
    return J, grad.ravel()
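Because this example returns the cost and gradient as a pair, it can be passed straight to scipy.optimize.minimize with jac=True, which tells the optimizer that the objective returns (cost, gradient). A minimal sketch, assuming the sigmoid helper above is in scope; the toy data and lambda value are illustrative:

import numpy as np
from scipy.optimize import minimize

X = np.c_[np.ones(4), np.array([0., 1., 2., 3.])]  # toy data (assumption)
y = np.array([0., 0., 1., 1.])
theta0 = np.zeros(X.shape[1])

res = minimize(cost_function_reg, theta0, args=(X, y, 1.0),
               jac=True, method='TNC')
print(res.x)   # fitted parameters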
Example #4
def gradient_reg(theta, x, y, lambda_):
    "Compute gradient for logistic regression with regularization"
    m = len(x)
    theta = np.matrix(theta)
    x = np.matrix(x)
    y = np.matrix(y)

    parameters = int(theta.ravel().shape[1])
    grad = np.zeros(parameters)
    h = sigmoid(x * theta.T)
    error = h - y

    for i in range(parameters):
        g = (error.T * x[:, i]) / m              # unregularized partial derivative
        if i == 0:
            grad[i] = g.item()                   # the intercept is not regularized
        else:
            grad[i] = g.item() + (lambda_ / m) * theta[0, i]

    return grad
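Looping over the parameters one at a time produces the same gradient as the vectorized form in Example #1. The vectorized X.T.dot(error) version is usually preferred since it avoids Python-level iteration, but the explicit loop makes the per-parameter regularization rule (skip theta_0, penalize the rest) easy to see.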
Example #5
def calc_J(theta, X, y, lambda_):
    m = len(y)
    h = ex2.sigmoid(X.dot(theta))
    # element-wise cross-entropy: -y*log(h) - (1 - y)*log(1 - h)
    t = -y * np.log(h) - (1 - y) * np.log(1 - h)
    # regularize every parameter except the intercept theta[0]
    J = np.sum(t) / m + lambda_ / (2 * m) * np.sum(theta[1:]**2)
    return J
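A finite-difference check is a quick way to confirm that the analytic gradient of Example #1 matches the cost of Example #5. A minimal sketch, assuming the ex2 module used by both snippets is importable; the toy data and epsilon are illustrative:

import numpy as np

def numerical_grad(J, theta, eps=1e-4):
    # central differences: (J(theta + eps*e_i) - J(theta - eps*e_i)) / (2*eps)
    grad = np.zeros_like(theta)
    for i in range(len(theta)):
        step = np.zeros_like(theta)
        step[i] = eps
        grad[i] = (J(theta + step) - J(theta - step)) / (2 * eps)
    return grad

X = np.c_[np.ones(4), np.array([0., 1., 2., 3.])]  # toy data (assumption)
y = np.array([0., 0., 1., 1.])
theta = np.array([0.1, -0.2])

analytic = calc_grad(theta, X, y, 1.0)
numeric = numerical_grad(lambda t: calc_J(t, X, y, 1.0), theta)
print(np.max(np.abs(analytic - numeric)))  # expected to be tiny, on the order of 1e-9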