Example #1
def test(X, y, learned_params):

    N = np.shape(X)[0]  # number of instances
    X = np.append(np.ones((N, 1)), X, 1)  # prepend a column of ones as the bias term used by the learned logistic regression weights
    F = np.shape(X)[1]  # number of features + 1 (bias column)

    # build per-class probabilities by successive differencing of the cumulative logistic outputs
    p_old = 1
    class_prob = []
    for w in learned_params:
        p = Utils.logistic_transformation(learned_params[w], X)
        class_prob.append(p_old - p)
        p_old = p
    class_prob.append(p_old)
    class_prob = np.asarray(class_prob)  # shape: (num_classes, N)

    max_prob = np.max(class_prob, 0)  # highest class probability for each instance

    predicted_y = []
    output_label = range(min_class_label, max_class_label + 1)  # min/max_class_label are module-level globals
    for i in range(np.size(max_prob)):
        # pick the class whose probability equals the per-instance maximum
        class_label = np.where(class_prob == max_prob[i])[0]
        # print(class_label)
        predicted_y.append(output_label[class_label[0]])
    
    #print "predicted y :", predicted_y
    #print "Actual y:", y
    accuracy = Utils.calculate_accuracy(np.array(y), np.array(predicted_y))
    f_score_mean, f_score_std = Utils.calculate_average_F1score(np.array(y), np.array(predicted_y), min_class_label, max_class_label)
    return (accuracy, f_score_mean, f_score_std)
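
Reading the function above, each entry in learned_params contributes one cumulative logistic response, and successive differencing turns those K-1 responses into K class probabilities. Below is a minimal, self-contained sketch of that construction; sigmoid and class_probabilities are illustrative names, and sigmoid is only an assumed stand-in for Utils.logistic_transformation.

import numpy as np

def sigmoid(z):
    # standard logistic function; assumed to match what Utils.logistic_transformation computes
    return 1.0 / (1.0 + np.exp(-z))

def class_probabilities(weight_vectors, X_bias):
    # weight_vectors: K-1 weight vectors; X_bias: (N, F) design matrix with a bias column prepended
    probs = []
    p_old = np.ones(X_bias.shape[0])
    for w in weight_vectors:
        p = sigmoid(X_bias @ w)   # cumulative response for this threshold
        probs.append(p_old - p)   # probability mass assigned to this class
        p_old = p
    probs.append(p_old)           # remaining mass goes to the last class
    return np.asarray(probs)      # shape (K, N), as in class_prob above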
Example #2
def Estep(x, w, a, b):
    # E-step: posterior responsibility ycap = p*a / (p*a + (1-p)*b),
    # computed in log space for numerical stability
    p = Utils.logistic_transformation(w, x)
    log_p_a = np.log(p) + np.log(a)
    log_p_ab = np.log(p * a + (1 - p) * b)
    log_ycap = log_p_a - log_p_ab
    ycap = np.exp(log_ycap)
    return ycap
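
The E-step above computes the responsibility ycap = p*a / (p*a + (1-p)*b) in log space. A small self-contained check of that identity, with sigmoid as an assumed stand-in for Utils.logistic_transformation and arbitrary toy values for x, w, a and b:

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

rng = np.random.default_rng(0)
x = rng.normal(size=(5, 3))   # 5 toy instances, 3 features
w = rng.normal(size=3)
a, b = 0.9, 0.1               # fixed mixture parameters in (0, 1)

p = sigmoid(x @ w)
direct = p * a / (p * a + (1 - p) * b)                                   # direct form
log_space = np.exp(np.log(p) + np.log(a) - np.log(p * a + (1 - p) * b))  # as in Estep
assert np.allclose(direct, log_space)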
Example #3
def test(X, y, learned_params):

    N = np.shape(X)[0]  # number of instances
    X = np.append(
        np.ones((N, 1)), X, 1
    )  # prepend a column of ones as the bias term used by the learned logistic regression weights
    F = np.shape(X)[1]  # number of features + 1 (bias column)

    # build per-class probabilities by successive differencing of the cumulative logistic outputs
    p_old = 1
    class_prob = []
    for w in learned_params:
        p = Utils.logistic_transformation(learned_params[w], X)
        class_prob.append(p_old - p)
        p_old = p
    class_prob.append(p_old)
    class_prob = np.asarray(class_prob)  # shape: (num_classes, N)
    max_prob = np.max(class_prob, 0)  # highest class probability for each instance

    cs = np.array(class_prob)
    # rearrange to shape (instances, classes) so each row holds one instance's class probabilities
    cs = np.reshape(cs, cs.size, order="F").reshape(np.shape(cs)[1], np.shape(cs)[0])
    # runner-up (second most probable) class and its probability for each instance;
    # the +1 converts the 0-based column index to a label, assuming labels start at 1
    order = cs.argsort(axis=1)
    second_max = order[:, -2] + 1
    cs_prob = cs[np.arange(cs.shape[0]), order[:, -2]]

    predicted_y = []
    arbitrary_answer = []
    output_label = range(min_class_label, max_class_label + 1)  # min/max_class_label are module-level globals
    for i in range(np.size(max_prob)):
        class_label = np.where(class_prob == max_prob[i])[0]
        predicted_y.append(output_label[class_label[0]])
        # human-readable summary: predicted label and probability plus the runner-up class
        arbitrary_answer.append(
            str(output_label[class_label[0]])
            + " prob "
            + str(max_prob[i])
            + " second prob "
            + str(cs_prob[i])
            + " second class "
            + str(second_max[i])
        )

    # print "predicted y :", predicted_y
    # print "Actual y:", y
    accuracy = Utils.calculate_accuracy(np.array(y), np.array(predicted_y))
    f_score_mean, f_score_std = Utils.calculate_average_F1score(
        np.array(y), np.array(predicted_y), min_class_label, max_class_label
    )
    return (accuracy, f_score_mean, f_score_std)
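
The extra bookkeeping in this variant records, for every instance, the most probable class together with the runner-up class and its probability. A minimal sketch of that step on a plain (instances x classes) probability matrix; the variable names here are illustrative only:

import numpy as np

probs = np.array([[0.1, 0.7, 0.2],
                  [0.5, 0.3, 0.2]])   # 2 instances, 3 classes

order = probs.argsort(axis=1)                      # class indices sorted by ascending probability
best = order[:, -1]                                # most probable class per instance
runner_up = order[:, -2]                           # second most probable class per instance
best_p = probs[np.arange(len(probs)), best]
runner_up_p = probs[np.arange(len(probs)), runner_up]

for i in range(len(probs)):
    print(str(best[i]) + " prob " + str(best_p[i])
          + " second prob " + str(runner_up_p[i])
          + " second class " + str(runner_up[i]))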