Example #1
def cost(theta, x, y):
    """
    Cost of the logistic regression function.

    :param theta: parameter(s)
    :param x: sample(s)
    :param y: target(s)
    :return: cost
    """
    N, n = x.shape

    ##############
    #
    # TODO
    #
    # Write the cost of logistic regression as defined in the lecture
    # Hint:
    #   - use the logistic function sig imported from the file toolbox
    #   - sums of logs of numbers close to zero can lead to numerical errors; try splitting the cost into separate
    #     sums over the positive and the negative samples to overcome the problem. If the problem remains, note that
    #     small errors here are not necessarily harmful for gradient descent, because only the gradient of the cost
    #     is used for the parameter updates.

    c = 0
    for i in range(N):
        h_i = sig(np.dot(x[i], theta))     # predicted probability for sample i
        c += y[i] * np.log(h_i) + (1 - y[i]) * np.log(1 - h_i)
    c /= -N

    #print("cost is: ",c)
    return c
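A different way to address the numerical hint above, without splitting the sum by class, is to express the per-sample loss in terms of the logit z = x·theta and use np.logaddexp, which evaluates log(1 + exp(z)) stably. This is only a minimal sketch, assuming sig is the standard logistic function 1/(1 + exp(-z)) and numpy is imported as np; the name stable_cost is illustrative and not part of the exercise.

def stable_cost(theta, x, y):
    """Numerically stable logistic cost: logaddexp(0, z) - y*z equals
    -y*log(sig(z)) - (1-y)*log(1-sig(z)) without taking the log of values near 0."""
    z = np.dot(x, theta)                          # logits, shape (N,)
    return np.mean(np.logaddexp(0.0, z) - y * z)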
Example #2
def grad(theta, x, y):
    """

    Computes the gradient of the cost of logistic regression

    :param theta: parameter(s)
    :param x: sample(s)
    :param y: target(s)
    :return: gradient
    """
    N, n = x.shape

    #  TODO - prefer numpy vectorized operations over for loops

    g = np.zeros(theta.shape)
    for j in range(len(theta)):
        g_j = 0
        for i in range(N):
            h_i = sig(np.dot(theta, x[i]))               # predicted probability for sample i
            g_j += (h_i - (1 if y[i] else 0)) * x[i][j]  # treat boolean targets as 0/1
        g[j] = g_j / N
    return g
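The vectorization hint in the comment above can be followed by collapsing both loops into one matrix product, as several of the later examples do. A minimal sketch, assuming sig and np are available as in the examples; the name grad_vectorized is illustrative only:

def grad_vectorized(theta, x, y):
    """Vectorized gradient of the logistic cost: (1/N) * x^T (sig(x theta) - y)."""
    N = x.shape[0]
    error = sig(np.dot(x, theta)) - y        # per-sample residuals, shape (N,)
    return np.dot(x.T, error) / N            # gradient, shape (n,)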
Example #3
def cost(theta, x, y):
    """
    Cost of the logistic regression function.

    :param theta: parameter(s)
    :param x: sample(s)
    :param y: target(s)
    :return: cost
    """
    N, n = x.shape

    ##############
    #
    # TODO
    #
    # Write the cost of logistic regression as defined in the lecture
    # Hint:
    #   - use the logistic function sig imported from the file toolbox
    #   - sums of logs of numbers close to zero can lead to numerical errors; try splitting the cost into separate
    #     sums over the positive and the negative samples to overcome the problem. If the problem remains, note that
    #     small errors here are not necessarily harmful for gradient descent, because only the gradient of the cost
    #     is used for the parameter updates.
    hypo = sig(np.dot(x,theta))

    pos_idx = np.where(y)[0]                 # indices of the positive samples
    neg_hypo = np.delete(hypo, pos_idx)      # predictions for the negative samples

    cost0 = np.sum(-np.log(1 - neg_hypo))    # contribution of the y == 0 samples
    cost1 = np.sum(-np.log(hypo[pos_idx]))   # contribution of the y == 1 samples

    c = (cost0 + cost1) / N

    # END TODO
    ###########

    return c
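The same positive/negative split can also be written with boolean masks instead of np.where and np.delete. A minimal sketch under the same assumptions (sig from toolbox, numpy as np); cost_masked is an illustrative name:

def cost_masked(theta, x, y):
    """Split the cross-entropy sum by class using boolean masks."""
    N = x.shape[0]
    h = sig(np.dot(x, theta))                # predicted probabilities
    pos = (y == 1)                           # mask of positive samples
    cost1 = -np.sum(np.log(h[pos]))          # y == 1 contributions
    cost0 = -np.sum(np.log(1.0 - h[~pos]))   # y == 0 contributions
    return (cost0 + cost1) / N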
Example #4
def grad(theta, x, y):
    """

    Compute the gradient of the cost of logistic regression

    :param theta: parameter(s)
    :param x: sample(s)
    :param y: target(s)
    :return: gradient
    """
    N, n = x.shape

    ##############
    #
    # TODO
    #

    x_O = x.dot(theta)
    h_O = sig(x_O)
    tmp = h_O - y
    g = 1. / N * tmp.dot(x)

    # END TODO
    ###########

    return g
Example #5
def grad(theta, x, y):
    """

    Compute the gradient of the cost of logistic regression

    :param theta: parameter(s)
    :param x: sample(s)
    :param y: target(s)
    :return: gradient
    """
    N, n = x.shape

    ##############
    #
    # TODO
    #

    g = np.zeros(theta.shape)

    for i in range(0, g.shape[0]):
        sum_ = 0
        for j in range(0, N):
            p = sig(x[j].dot(theta))
            sum_ = sum_ + (p - y[j]) * x[j][i]
        g[i] = sum_ / N

    # END TODO
    ###########

    return g
Example #6
def grad(theta, x, y):
    """

    Compute the gradient of the cost of logistic regression

    :param theta: parameter(s)
    :param x: sample(s)
    :param y: target(s)
    :return: gradient
    """
    #N, n = x.shape

    ##############
    #
    # TODO
    #
    #   - prefer numpy vectorized operations over for loops

    m = x.shape[0]
    h_theta = sig(np.dot(x,theta))
    g = (1/m)*(np.dot((h_theta-y),x))

    # END TODO
    ###########

    return g
Example #7
def cost(theta, x, y):
    """
    Cost of the logistic regression function.

    :param theta: parameter(s)
    :param x: sample(s)
    :param y: target(s)
    :return: cost
    """
    N, n = x.shape

    ##############
    #
    # TODO
    #
    # Write the cost of logistic regression as defined in the lecture

    c = 0
    for i in range(0, N):
        p = sig(x[i].dot(theta))
        if y[i] == 0:
            c += np.log(1 - p)
        else:
            c += np.log(p)

    c = -c / N

    # END TODO
    ###########

    return c
Example #8
def grad(theta, x, y):
    """

    Compute the gradient of the cost of logistic regression

    :param theta: parameter(s)
    :param x: sample(s)
    :param y: target(s)
    :return: gradient
    """
    N, n = x.shape

    ##############
    #
    # TODO
    #

    g = np.zeros(theta.shape)

    for j in range(0, n):
        sum_i = 0
        for i in range(N):
            sum_i += (sig(np.dot(theta, x[i])) - y[i]) * x[i][j]   # x[i] is 1-D, so no transpose is needed
        g[j] = sum_i / N

    # END TODO
    ###########

    return g
Example #9
def grad(theta, x, y):
    """

    Computes the gradient of the cost of logistic regression

    :param theta: parameter(s)
    :param x: sample(s)
    :param y: target(s)
    :return: gradient
    """
    N, n = x.shape

    ##############
    #
    # TODO
    #
    #   - prefer numpy vectorized operations over for loops

    #   print("1 ",np.dot(x,theta))
    # print("2 ",np.dot(x[0],theta))
    g = np.zeros(theta.shape)

    for j in range(0, n):
        for i in range(0, N):
            g[j] += 1 / N * ((sig(np.dot(x[i], theta)) - y[i]) * x[i][j])

    # END TODO
    ###########

    return g
Example #10
def cost(theta, x, y):
    """
    Computes the cost of the logistic regression function.

    :param theta: parameter(s)
    :param x: sample(s)
    :param y: target(s)
    :return: cost
    """
    N, n = x.shape

    ##############
    #
    # TODO
    #
    # Write the cost of logistic regression as defined in the lecture
    # Hint:
    #   - use the logistic function sig imported from the file toolbox
    #   - prefer numpy vectorized operations over for loops
    #
    # WARNING: If you run into instabilities during the exercise, this
    #   could be due to the use of log(x) with x very close to 0. Some
    #   implementations are more or less sensitive to this issue, so you
    #   may try another one. A (dirty) trick is to replace log(x) with
    #   log(x + epsilon), where epsilon is a very small number like 1e-20
    #   or 1e-10, but the gradients might not be exact anymore.

    h = sig(np.dot(x, theta))                # predicted probabilities
    c = -1 / N * (y * np.log(h) + (1 - y) * np.log(1 - h))

    c = c.sum()

    # END TODO
    ###########

    return c
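An alternative to the epsilon trick mentioned in the warning is to clip the predicted probabilities away from 0 and 1 before taking the log. A minimal sketch; the clipping bound eps is chosen here purely for illustration and is not part of the exercise:

def cost_clipped(theta, x, y, eps=1e-12):
    """Clip probabilities to [eps, 1 - eps] so np.log never sees an exact 0."""
    N = x.shape[0]
    h = np.clip(sig(np.dot(x, theta)), eps, 1.0 - eps)
    return -np.mean(y * np.log(h) + (1 - y) * np.log(1.0 - h))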
Example #11
def grad(theta, x, y):
    """

    Compute the gradient of the cost of logistic regression

    :param theta: parameter(s)
    :param x: sample(s)
    :param y: target(s)
    :return: gradient
    """
    N, n = x.shape

    h = sig(np.matmul(x, theta)) - y
    g = np.matmul(h.T, x) / N

    return g
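As a usage illustration, a vectorized cost/grad pair like the ones above can be dropped into a plain gradient-descent loop. A minimal sketch with synthetic data; the learning rate lr, the iteration count n_iters and the random data are assumptions for the demo, not part of the exercise:

rng = np.random.default_rng(0)
x_demo = rng.normal(size=(100, 3))                           # 100 samples, 3 features
y_demo = (x_demo @ np.array([1.0, -2.0, 0.5]) > 0).astype(float)

theta = np.zeros(3)
lr, n_iters = 0.1, 1000
for _ in range(n_iters):
    theta -= lr * grad(theta, x_demo, y_demo)                # gradient-descent update
print(cost(theta, x_demo, y_demo))                           # should be much lower than at theta = 0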
Example #12
def grad(theta, x, y):
    """

    Compute the gradient of the cost of logistic regression

    :param theta: parameter(s)
    :param x: sample(s)
    :param y: target(s)
    :return: gradient
    """
    N, n = x.shape

    g = np.zeros(theta.shape)

    for j in range(len(theta)):
        for i in range(N):
            g[j] += (sig(np.dot(x[i], theta)) - y[i]) * x[i][j]
    g /= N

    return g
Example #13
def cost(theta, x, y):
    """
    Cost of the logistic regression function.

    :param theta: parameter(s)
    :param x: sample(s)
    :param y: target(s)
    :return: cost
    """
    N, n = x.shape

    c = 0
    h = sig(np.matmul(x, theta))
    for hi, yi in zip(h, y):
        if yi == 0:
            c += -np.log(1 - hi)
        else:
            c += -np.log(hi)
    c = c / N

    return np.array([c])
Example #14
def cost(theta, x, y):
    """
    Cost of the logistic regression function.

    :param theta: parameter(s)
    :param x: sample(s)
    :param y: target(s)
    :return: cost
    """
    #N, n = x.shape

    ##############
    #
    # TODO
    #
    # Write the cost of logistic regression as defined in the lecture
    # Hint:
    #   - use the logistic function sig imported from the file toolbox
    #   - prefer numpy vectorized operations over for loops
    # 
    # WARNING: If you run into instabilities during the exercise, this
    #   could be due to the use of log(x) with x very close to 0. Some
    #   implementations are more or less sensitive to this issue, so you
    #   may try another one. A (dirty) trick is to replace log(x) with
    #   log(x + epsilon), where epsilon is a very small number like 1e-20
    #   or 1e-10, but the gradients might not be exact anymore. This
    #   problem sometimes arises only when minimizing the cost function
    #   with the scipy optimizer.
    m = x.shape[0]
    log_eps = 1e-20
    h_theta = sig(np.dot(x,theta))
    c = -(1/m)*(np.dot(y,np.log(h_theta+log_eps))+np.dot((1-y),np.log(1-h_theta+log_eps)))

    # END TODO
    ###########

    return c
Example #15
def grad(theta, x, y):
    """

    Compute the gradient of the cost of logistic regression

    :param theta: parameter(s)
    :param x: sample(s)
    :param y: target(s)
    :return: gradient
    """
    N, n = x.shape

    ##############
    #
    # TODO
    #
    hypo = sig(np.dot(x, theta))
    error = hypo-y
    g = (1/N) * (np.dot(error.T, x))
    # END TODO
    ###########

    return g
Example #16
def cost(theta, x, y):
    """
    Cost of the logistic regression function.

    :param theta: parameter(s)
    :param x: sample(s)
    :param y: target(s)
    :return: cost
    """
    N, n = x.shape

    ##############
    #
    # TODO
    #

    x_O = x.dot(theta)
    h_O = sig(x_O)
    c = -np.sum(np.dot(y, np.log(h_O)) + np.dot((1 - y), np.log(1 - h_O))) / N

    # END TODO
    ###########

    return c