Exemplo n.º 1
0
def apply_SA(trainX, trainY, testX, testY, window, source_pos, target_pos):
    """Evaluate Subspace Alignment (Fernando et al., 2013) with three base learners.

    Fits a ``SubspaceAlignedClassifier`` on the labelled source data
    (``trainX``/``trainY``) aligned to the unlabelled target data (``testX``),
    then scores predictions against ``testY`` via ``check_accuracy``.

    Parameters
    ----------
    trainX, trainY : source-domain features and labels.
    testX, testY : target-domain features and ground-truth labels.
    window, source_pos, target_pos : experiment metadata copied into the result.

    Returns
    -------
    pandas.DataFrame
        A single-row frame with the accuracy and its info string for each of
        the three base learners (decision tree, logistic regression,
        Bernoulli naive Bayes), plus the experiment metadata.
    """
    def _run(loss):
        # One SA fit/predict/score cycle for the given base-learner loss.
        print("\n Subspace Alignment (Fernando et al., 2013) ")
        classifier = SubspaceAlignedClassifier(loss=loss)
        classifier.fit(trainX, trainY, testX)
        pred = classifier.predict(testX)
        return check_accuracy(testY, pred)

    acc_DT_SA, acc_DT_SA_INFO = _run("dtree")     # Decision Tree
    acc_LR_SA, acc_LR_SA_INFO = _run("logistic")  # Logistic Regression
    acc_NB_SA, acc_NB_SA_INFO = _run("berno")     # Bernoulli Naive Bayes

    return pd.DataFrame(
            [{
            'window': window,
            'source_position': source_pos,
            'target_position': target_pos,

            'acc_LR_SA': acc_LR_SA,
            'acc_LR_SA_INFO': str(acc_LR_SA_INFO),
            'acc_DT_SA': acc_DT_SA,
            'acc_DT_SA_INFO': str(acc_DT_SA_INFO),
            'acc_NB_SA': acc_NB_SA,
            'acc_NB_SA_INFO': str(acc_NB_SA_INFO),
            }]
        )
Exemplo n.º 2
0
def test_predict():
    """Predictions on target data must only use labels seen in training."""
    n_per_class, n_features = 5, 2
    # Source samples with balanced -1/+1 labels.
    X = rnd.randn(2 * n_per_class, n_features)
    y = np.concatenate((-np.ones(n_per_class), np.ones(n_per_class)))
    # Target samples drawn from a shifted distribution.
    Z = rnd.randn(2 * n_per_class, n_features) + 1
    clf = SubspaceAlignedClassifier()
    clf.fit(X, y, Z)
    predictions = clf.predict(Z)
    # No predicted label may fall outside the training label set.
    assert set(np.unique(predictions)).issubset(set(np.unique(y)))
Exemplo n.º 3
0
        if v == testY[i]:
            p += 1
    acc = p * 100 / len(result)
    # print(result)
    #print("ACC:{0}%, Total:{1}/{2} with positive {3}".format(acc, len(result), len(testY), p))
    return acc  #, check_accuracy(result, testY)


########################
#### WITHOUT TL ########
########################
# NOTE(review): the banner says "WITHOUT TL", yet both classifiers below are
# SubspaceAlignedClassifier, which *is* a transfer-learning method — confirm
# whether the header or the classifier choice is the intended one.
# Decision Tree
#print("\n Subspace Alignment (Fernando et al., 2013) ")
classifier_SA = SubspaceAlignedClassifier(loss="dtree")
# Fit on labelled source data (trainX/trainY), aligned to unlabelled target testX.
classifier_SA.fit(trainX, trainY, testX)
pred_naive = classifier_SA.predict(testX)
# checkAccuracy presumably returns a scalar accuracy — defined earlier in the file.
acc_DT_SA = checkAccuracy(testY, pred_naive)
# Class-probability estimates; computed but only used for the commented-out print.
prob = classifier_SA.predict_proba(testX)
#print(prob)
print("acc_DT_SA:", acc_DT_SA)
# Logistic Regression
#print("\n Subspace Alignment (Fernando et al., 2013) ")
classifier = SubspaceAlignedClassifier(loss="logistic")
classifier.fit(trainX, trainY, testX)
pred_naive = classifier.predict(testX)
prob = classifier.predict_proba(testX)
#print(prob)
acc_LR_SA = checkAccuracy(testY, pred_naive)
print("acc_LR_SA:", acc_LR_SA)

#i = 0
Exemplo n.º 4
0
# Pool the source data with the small labelled target subset to train
# supervised baselines on the combined sample.
X_s_t = np.vstack([X_s, X_t_init])
y_s_t = np.concatenate([y_s, y_t_init])

# Supervised baselines: each is fit on the pooled data and scored on the
# held-out target test set.
clf_sup = RandomForestClassifier().fit(X_s_t, y_s_t)
pred = clf_sup.predict(X_test)
print(accuracy_score(y_test, pred))

clf_sup = MLPClassifier().fit(X_s_t, y_s_t)
pred = clf_sup.predict(X_test)
print(accuracy_score(y_test, pred))

clf_sup = DecisionTreeClassifier().fit(X_s_t, y_s_t)
pred = clf_sup.predict(X_test)
print(accuracy_score(y_test, pred))

clf_sup = SVC().fit(X_s_t, y_s_t)
pred = clf_sup.predict(X_test)
print(accuracy_score(y_test, pred))

# ENCO learning
# Semi-supervised variant: also receives the unlabelled target pool X_t.
clf_sup = encolearning.EnCoLearning(iteration=20).fit(X_s_t, y_s_t, X_t)
pred = clf_sup.predict(X_test)
print(accuracy_score(y_test, pred))

#%% TCA
# NOTE(review): this cell is labelled TCA but instantiates
# SubspaceAlignedClassifier (Subspace Alignment) — confirm which method
# was intended.
clf_tca = SubspaceAlignedClassifier(num_components=10)
# reshape flattens y_s to 1-D, as the fit API appears to expect.
clf_tca.fit(X_s, y_s.reshape(len(y_s), ), X_t)

pred = clf_tca.predict(X_test)
acc = accuracy_score(y_test, pred)
print(acc)