Example #1
print("Prediction: x1 = 0, x2 = 1, ... -> y = %d" %
      (tm.predict(np.array([[0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0]]))))
print("Prediction: x1 = 0, x2 = 0, ... -> y = %d" %
      (tm.predict(np.array([[0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0]]))))
print("Prediction: x1 = 1, x2 = 1, ... -> y = %d" %
      (tm.predict(np.array([[1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0]]))))
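# The predictions above assume a tm that has already been constructed and trained.
# A minimal setup sketch, assuming the pyTsetlinMachine package and a toy 12-feature
# noisy-XOR-style dataset (all data names and hyperparameters below are placeholders,
# not taken from the original example):
import numpy as np
from pyTsetlinMachine.tm import MultiClassTsetlinMachine

X_train = np.random.randint(0, 2, size=(500, 12)).astype(np.uint32)  # binary inputs
Y_train = np.logical_xor(X_train[:, 0], X_train[:, 1]).astype(np.uint32)  # XOR of x1, x2

tm = MultiClassTsetlinMachine(10, 15, 3.9)  # number_of_clauses, T, s
tm.fit(X_train, Y_train, epochs=200)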

print("\nLet's try to get clauses....")

NUM_FEATURES = len(X_train[0])

print('Num Clauses:', NUM_CLAUSES)
print('Num Classes: ', len(CLASSES), ' : ', CLASSES)
print('Num Features: ', NUM_FEATURES)

# For each clause and class, list the included literals: TA index f < NUM_FEATURES is
# the plain feature F(f); f >= NUM_FEATURES is the negated feature -|F(f - NUM_FEATURES).
for cur_clause in range(NUM_CLAUSES):
    for cur_cls in CLASSES:
        this_clause = ''
        for f in range(NUM_FEATURES * 2):
            action = tm.ta_action(int(cur_cls), cur_clause, f)
            if action == 1:  # the literal is included in this clause
                if this_clause != '':
                    this_clause += 'AND '
                if f < NUM_FEATURES:
                    this_clause += 'F' + str(f) + ' '
                else:
                    this_clause += '-|F' + str(f - NUM_FEATURES) + ' '

        print('CLASS :', cur_cls, ' - CLAUSE ', cur_clause, ' : ', this_clause)
    print('\n\n')
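# The literal indexing used above (TA index f < NUM_FEATURES is the plain feature,
# f >= NUM_FEATURES its negation) can be wrapped in a small helper. A sketch only,
# not part of the original listing:
def clause_to_string(tm, cls, clause, num_features):
    # Collect the literals included in one clause of the given class.
    literals = []
    for f in range(num_features * 2):
        if tm.ta_action(int(cls), clause, f) == 1:
            if f < num_features:
                literals.append('F' + str(f))
            else:
                literals.append('-|F' + str(f - num_features))
    return ' AND '.join(literals)

# e.g. print(clause_to_string(tm, CLASSES[0], 0, NUM_FEATURES))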
Example #2
    #print('\nsplits ready:',x_train.shape, x_test.shape)
    # Create and train the Tsetlin machine, then record its test accuracy for this run
    tm = MultiClassTsetlinMachine(NUM_CLAUSES, T, s)
    tm.fit(x_train, y_train, epochs=TRAIN_EPOCHS, incremental=True)
    print('\nfit done')
    result[r] = 100 * (tm.predict(x_test) == y_test).mean()
    # Tally how each feature is used across the clauses: even-indexed clauses have
    # positive polarity (vote for the class), odd-indexed clauses negative polarity.
    feature_vector = np.zeros(NUM_FEATURES * 2)
    for cur_cls in CLASSES:
        for cur_clause in range(NUM_CLAUSES):
            if cur_clause % 2 == 0:
                clause_type = 'positive'
            else:
                clause_type = 'negative'
            this_clause = ''
            for f in range(0, NUM_FEATURES):
                action_plain = tm.ta_action(int(cur_cls), cur_clause, f)
                action_negated = tm.ta_action(int(cur_cls), cur_clause,
                                              f + NUM_FEATURES)
                feature_vector[f] = action_plain
                feature_vector[f + NUM_FEATURES] = action_negated
                feature_count_plain[f] += action_plain
                feature_count_negated[f] += action_negated
                if action_plain == 0 and action_negated == 0:
                    feature_count_ignore += 1  # feature not used by this clause at all
                # both the plain and the negated literal included: a contradiction
                feature_count_contradiction += action_plain and action_negated
                if clause_type == 'positive':
                    feature_count_plain_positive[f] += action_plain
                    feature_count_negated_positive[f] += action_negated
                else:
                    feature_count_plain_negative[f] += action_plain
                    feature_count_negated_negative[f] += action_negated
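# The fragment above relies on counters that are initialized elsewhere in the original
# example; a plausible initialization, assuming per-feature arrays and scalar totals:
feature_count_plain = np.zeros(NUM_FEATURES)
feature_count_negated = np.zeros(NUM_FEATURES)
feature_count_plain_positive = np.zeros(NUM_FEATURES)
feature_count_negated_positive = np.zeros(NUM_FEATURES)
feature_count_plain_negative = np.zeros(NUM_FEATURES)
feature_count_negated_negative = np.zeros(NUM_FEATURES)
feature_count_ignore = 0
feature_count_contradiction = 0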
Example #3
    print(
        "#%d AccuracyTrain: %.2f%% AccuracyTest: %.2f%% F1-Score: %.2f%%  Training: %.2fs Testing: %.2fs"
        % (i + 1, result2, result1, f1 * 100, stop_training - start_training,
           stop_testing - start_testing))
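# The accuracy, F1 and timing values in this print would typically come from wrapping
# fit/predict in time.time() calls and scoring with scikit-learn. A sketch under that
# assumption (X_train/Y_train/X_test/Y_test are assumed to exist):
import time
from sklearn.metrics import f1_score

start_training = time.time()
tm.fit(X_train, Y_train, epochs=1, incremental=True)
stop_training = time.time()

start_testing = time.time()
pred_test = tm.predict(X_test)
stop_testing = time.time()

result2 = 100 * (tm.predict(X_train) == Y_train).mean()  # training accuracy
result1 = 100 * (pred_test == Y_test).mean()             # test accuracy
f1 = f1_score(Y_test, pred_test, average='macro')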

#%%%%%%%%%%%%%%%%% To extract the clauses with their literals for a particular class %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
tm1.set_state(ta_state)

#Save trained model with highest testing accuracy
np.savez_compressed("laptopModel.npz", ta_state)
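# The ta_state being saved would normally be captured with get_state() at the epoch
# with the best test accuracy; a sketch of that pattern (the loop and accuracy
# variables are assumptions, not from the original example):
best_accuracy = 0.0
for epoch in range(100):
    tm1.fit(X_train, Y_train, epochs=1, incremental=True)
    accuracy = 100 * (tm1.predict(X_test) == Y_test).mean()
    if accuracy > best_accuracy:
        best_accuracy = accuracy
        ta_state = tm1.get_state()  # snapshot of the TA states of the best model so far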

#Load trained TM to evaluate results
ta_state = np.load("laptopModel.npz")['arr_0']

tm1.set_state(ta_state)
print(tm1.predict(X_final4[1990:1991, :]))
number_of_features = 4212
print("\nClass 2 Positive Clauses:\n")
for j in range(
        0, 300, 2
):  #0 is negative, 1 is neutral and 2 is positive class (change accordingly)
    print("Clause #%d: " % (j), end=' ')
    l = []
    for k in range(number_of_features * 2):
        if tm1.ta_action(1, j, k) == 1:
            if k < number_of_features:
                l.append(" x%d" % (k))
            else:
                continue
    print(" ∧ ".join(l))