Example #1
import numpy as mat  # these examples use NumPy under the alias "mat"
import sigmoidFuntion  # project module providing hypo() and sigmoid()


def regularised_grad(theta, X, Y, lamb):
    """
    Calculates the gradient of the regularised loss function.

    :param theta: weight vector
    :param X: input matrix
    :param Y: output matrix
    :param lamb: regularisation parameter
    :return: gradient (the slope of the tangent line to the loss function)
    """
    total_example = X.shape[0]
    theta = mat.c_[theta]  # reshape theta into a column vector

    # linear hypothesis X.theta, then sigmoid activation
    hypothesis = sigmoidFuntion.hypo(theta, X)
    sigm = sigmoidFuntion.sigmoid(hypothesis)

    # unregularised gradient: (1/m) * X^T (h - Y)
    optimum_grad = mat.multiply(1 / total_example,
                                mat.dot(X.transpose(), mat.subtract(sigm, Y)))

    # add the regularisation term (lamb/m) * theta_j to every
    # component except the bias term (j = 0)
    reg = mat.multiply(lamb / total_example, theta[1:theta.shape[0]])
    reg = mat.add(optimum_grad[1:optimum_grad.shape[0], :], reg)

    # stitch the untouched bias gradient back on top of the regularised rest
    regularised_Para = mat.c_[optimum_grad[0, :], reg.transpose()]

    return regularised_Para.transpose()
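
All four examples call into the project's own sigmoidFuntion module, which this page does not show. Below is a minimal sketch of what it presumably provides, inferred from the call sites: hypo appears to be the linear term X.theta and sigmoid the standard logistic function. The bodies are an assumption, not the project's actual code.

# sigmoidFuntion.py - assumed implementation, inferred from the calls above
import numpy as mat


def hypo(theta, X):
    # linear hypothesis X.theta (assumption: hypo returns the pre-activation)
    return mat.dot(X, theta)


def sigmoid(z):
    # standard logistic function 1 / (1 + e^(-z))
    return 1 / (1 + mat.exp(-z))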
Example #2
def predict(parameters, X, Y):
    # class scores for every example; argmax picks the most likely
    # class per row (one-vs-all prediction)
    prediction = sigmoidFuntion.sigmoid(sigmoidFuntion.hypo(parameters, X))
    prediction = mat.argmax(prediction, axis=1)

    # zero wherever the predicted label matches the true label
    prediction = mat.subtract(mat.c_[prediction], Y)

    # calculate accuracy: fraction of matching labels, as a percentage
    accuracy = ((len(mat.where(prediction == 0)[0])) / len(Y)) * 100
    return int(accuracy)
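
A hedged usage sketch for predict, assuming the sigmoidFuntion sketch above and a one-vs-all parameter matrix with one column per class; the toy values are illustrative only and not from the original project.

# toy check: 3 examples, bias + 2 features, 2 classes
X_demo = mat.c_[mat.ones(3), mat.array([[1.0, 2.0], [4.0, 1.0], [0.5, 3.0]])]
params_demo = mat.zeros((3, 2))        # (features x classes), all zeros
Y_demo = mat.c_[mat.array([0, 0, 0])]  # column vector of true labels
# with zero weights argmax breaks the tie towards class 0, so accuracy is 100
print(predict(params_demo, X_demo, Y_demo))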
Example #3
def checkAccuracy(optimised_Parameter, X, Y):
    # predicted probabilities
    hypo = sigmoidFuntion.hypo(optimised_Parameter, X)
    prediction = sigmoidFuntion.sigmoid(hypo)

    # threshold at 0.5: probability >= 0.5 predicts class 1, otherwise 0
    prediction = mat.where(prediction >= 0.5, 1, 0)

    # zero wherever the predicted label matches the true label
    prediction = mat.subtract(mat.c_[prediction], Y)

    # calculate accuracy: fraction of matching labels, as a percentage
    accuracy = ((len(mat.where(prediction == 0)[0])) / len(Y)) * 100
    return int(accuracy)
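
Since sigmoid(z) >= 0.5 exactly when z >= 0, the same hard labels can be read straight off the linear term without evaluating the sigmoid. A hedged, behaviour-equivalent sketch (the name fast_labels is hypothetical, not part of the project):

def fast_labels(optimised_Parameter, X):
    # sigmoid(z) >= 0.5 iff z >= 0, so threshold the linear term directly
    return mat.where(sigmoidFuntion.hypo(optimised_Parameter, X) >= 0, 1, 0)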
Example #4
def regularised_cost(theta, X, Y, lamb):
    """
    Calculates the cost using the maximum-likelihood (cross-entropy)
    formulation and adds the regularisation term.

    :param theta: weight vector
    :param X: input matrix
    :param Y: output matrix
    :param lamb: regularisation parameter
    :return: error/cost
    """
    total_example = X.shape[0]
    theta = mat.c_[theta]  # reshape theta into a column vector

    # linear hypothesis X.theta, then sigmoid activation
    prediction = sigmoidFuntion.hypo(theta, X)
    sigm = sigmoidFuntion.sigmoid(prediction)

    # loss term for Y = 1: -Y^T log(h)
    loss0 = mat.dot(-Y.transpose(), mat.log(sigm))

    # loss term for Y = 0: (1 - Y)^T log(1 - h)
    loss1 = mat.dot(
        mat.subtract(1, Y).transpose(), mat.log(mat.subtract(1, sigm)))

    # total average loss: (1/m) * (loss0 - loss1)
    loss_final = mat.multiply((1 / total_example), mat.subtract(loss0, loss1))

    # regularisation term: lamb/(2m) * sum(theta_j^2) for j = 1..n,
    # where n is the number of features; the bias theta_0 is skipped
    regularized = mat.dot(
        lamb / (2 * total_example),
        mat.dot(theta[1:theta.shape[0], :].transpose(),
                theta[1:theta.shape[0], :]))

    # total cost = average loss + regularisation term
    return mat.add(loss_final, regularized)
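
A quick sanity check on regularised_cost, assuming the sigmoidFuntion sketch above: with theta = 0 every prediction is 0.5, so the unregularised cost is -log(0.5) = ln 2, roughly 0.6931, regardless of the data. The toy values below are illustrative only.

X_demo = mat.c_[mat.ones(4), mat.array([1.0, 2.0, 3.0, 4.0])]
Y_demo = mat.c_[mat.array([0, 1, 0, 1])]
print(regularised_cost(mat.zeros(2), X_demo, Y_demo, 0))  # ~0.6931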
# driver script; the data loading and plotting code precede this excerpt
import numpy as mat
import scipy.optimize as opt
import matplotlib.pyplot as plot

import LossandGradient

# show the scatter plot of the training data built earlier in the script
plot.show()

# add a column of ones (the intercept term) to X; rows and clm come from
# the data-loading code that precedes this excerpt
X = mat.c_[mat.ones(rows), X]
initial_theta = mat.zeros(clm)
lamb = 0
# calculate cost
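# with theta = 0 and lamb = 0 every prediction is 0.5, so the initial
# cost should come out to -log(0.5) = ln 2, roughly 0.6931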
cost = LossandGradient.regularised_cost(initial_theta, X, Y, lamb)
print("initial Cost = ", cost)

# calculate the optimum parameters with SciPy's built-in optimiser (TNC),
# supplying the analytic gradient from regularised_grad as jac

Result = opt.minimize(fun=LossandGradient.regularised_cost,
                      x0=initial_theta,
                      args=(X, Y, lamb),
                      method="TNC",
                      jac=LossandGradient.regularised_grad)

opt_theta = Result.x
print("optimum parameters are = ", opt_theta)

# calculate the cost again with the optimised parameters

cost = LossandGradient.regularised_cost(opt_theta, X, Y, lamb)

print("final cost = ", cost)

print(
    "Probability that a student with an Exam 1 score of 45 and an Exam 2 score of 85 gets admitted: ",
    (sigmoidFuntion.sigmoid(mat.dot(mat.c_[1, 45, 85], opt_theta))) * 100, "%")
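
The closing print reports a raw probability. A hedged follow-up, assuming the same 0.5 threshold used in checkAccuracy, turns it into a hard admit/reject decision:

prob = sigmoidFuntion.sigmoid(mat.dot(mat.c_[1, 45, 85], opt_theta)).item()
print("admitted" if prob >= 0.5 else "rejected")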