def run_test(training_subjects, test_subjects, C=1, kernel='rbf', degree=1, gamma='auto', data_source='', permutate_xyz=False, activities=None, show_confusion=False):
    """Train an SVM on ``training_subjects`` and evaluate on ``test_subjects``.

    Leave-one-out style run: the scaler model name is derived from the
    training-subject list so each fold uses its own fitted scaler.

    Args:
        training_subjects: subject identifiers used to load training data.
        test_subjects: subject identifiers used to load held-out test data.
        C, kernel, degree, gamma: passed through to ``sklearn.svm.SVC``.
        data_source: data-source key, prefixed onto the scaler model name.
        permutate_xyz: forwarded to the training-data loader.
        activities: activity labels to include; forwarded to both loaders.
        show_confusion: when True, append the confusion matrix to ``c_matrix``.

    Returns:
        Tuple ``(accuracy, weighted F1 score)`` on the test set.
    """
    # Scaler name must be identical for train and test so both use the
    # scaler fitted on this fold's training subjects (was duplicated inline).
    scaler_name = (data_source + '_' + ''.join(training_subjects)
                   + '_svmloo4_' + CONFIG.MODEL_NAMES['minmax_scaler'])

    X_train, Y_train = data_util.load_training_data(
        training_subjects,
        scaler_name,
        source=data_source,
        activities=activities,
        permutate_xyz=permutate_xyz)
    X_test, Y_test = data_util.load_testing_data(
        test_subjects,
        scaler_name,
        source=data_source,
        activities=activities)

    print('X_train size: {}'.format(len(X_train)))
    print('X_test size: {}'.format(len(X_test)))
    print()

    model = SVC(C=C, kernel=kernel, gamma=gamma, degree=degree,
                random_state=int(time.time()))
    model.fit(X_train, Y_train)
    predictions = model.predict(X_test)

    accuracy = accuracy_score(Y_test, predictions)
    fscore = f1_score(Y_test, predictions, average='weighted')

    if show_confusion:
        # NOTE(review): ``c_matrix`` is not defined in this function; it is
        # assumed to be a module-level accumulator — confirm it exists before
        # calling with show_confusion=True, otherwise this raises NameError.
        cm = confusion_matrix(Y_test, predictions)
        c_matrix.append(cm)

    return accuracy, fscore
def run_test(n_estimators=50, data_source='', activities=None, permutate_xyz=False, show_confusion=False):
    """Train a Random Forest on the configured subjects and evaluate it.

    Subjects come from ``CONFIG.TRAINING_DATA_SOURCE_SUBJECT`` /
    ``CONFIG.TESTING_DATA_SOURCE_SUBJECT``; the scaler model name embeds the
    training-subject list so test data is scaled with the same fitted scaler.

    Args:
        n_estimators: number of trees for ``RandomForestClassifier``.
        data_source: data-source key, prefixed onto the scaler model name.
        activities: activity labels to include; forwarded to both loaders.
        permutate_xyz: forwarded to the training-data loader.
        show_confusion: when True, plot the confusion matrix (blocking).

    Returns:
        Tuple ``(accuracy, weighted F1 score)`` on the test set.
    """
    # Single scaler name shared by both loaders (was duplicated inline).
    scaler_name = (data_source + '_'
                   + ''.join(CONFIG.TRAINING_DATA_SOURCE_SUBJECT)
                   + '_rfloo_' + CONFIG.MODEL_NAMES['minmax_scaler'])

    X_train, Y_train = data_util.load_training_data(
        CONFIG.TRAINING_DATA_SOURCE_SUBJECT,
        scaler_name,
        source=data_source,
        activities=activities,
        permutate_xyz=permutate_xyz)
    X_test, Y_test = data_util.load_testing_data(
        CONFIG.TESTING_DATA_SOURCE_SUBJECT,
        scaler_name,
        source=data_source,
        activities=activities)

    print('X_train size: {}'.format(len(X_train)))
    print('X_test size: {}'.format(len(X_test)))
    print()

    model = RandomForestClassifier(n_estimators=n_estimators, n_jobs=-1)
    model.fit(X_train, Y_train)
    predictions = model.predict(X_test)

    accuracy = accuracy_score(Y_test, predictions)
    fscore = f1_score(Y_test, predictions, average='weighted')

    if show_confusion:
        cm = confusion_matrix(Y_test, predictions)
        plt.figure()
        # Axis labels must follow the integer-encoded class order, so sort
        # the encoded activities before mapping back to their names.
        plot_util.plot_confusion_matrix(cm, [
            activity_encoding.INT_TO_ACTIVITY_MAPPING[i]
            for i in sorted([
                activity_encoding.ACTIVITY_TO_INT_MAPPING[a]
                for a in activities
            ])
        ])
        plt.show()

    return accuracy, fscore
def run_test(n_neighbors=50, data_source='', activities=None, permutate_xyz=False):
    """Train a KNN classifier on the configured subjects and evaluate it.

    Subjects come from ``CONFIG.TRAINING_DATA_SOURCE_SUBJECT`` /
    ``CONFIG.TESTING_DATA_SOURCE_SUBJECT``; both loaders use the shared
    min-max scaler model.

    Args:
        n_neighbors: number of neighbours for ``KNeighborsClassifier``.
        data_source: data-source key forwarded to the loaders.
        activities: activity labels to include; forwarded to both loaders.
        permutate_xyz: forwarded to the training-data loader.

    Returns:
        Tuple ``(accuracy, weighted F1 score)`` on the test set.
    """
    X_train, Y_train = data_util.load_training_data(
        CONFIG.TRAINING_DATA_SOURCE_SUBJECT,
        CONFIG.MODEL_NAMES['minmax_scaler'],
        source=data_source,
        activities=activities,
        permutate_xyz=permutate_xyz)
    X_test, Y_test = data_util.load_testing_data(
        CONFIG.TESTING_DATA_SOURCE_SUBJECT,
        CONFIG.MODEL_NAMES['minmax_scaler'],
        source=data_source,
        activities=activities)

    print('X_train size: {}'.format(len(X_train)))
    print('X_test size: {}'.format(len(X_test)))
    print()

    model = KNeighborsClassifier(n_neighbors=n_neighbors, n_jobs=-1,
                                 weights='uniform')
    model.fit(X_train, Y_train)
    predictions = model.predict(X_test)

    accuracy = accuracy_score(Y_test, predictions)
    fscore = f1_score(Y_test, predictions, average='weighted')

    return accuracy, fscore
def run_training(c=100, gamma=0.5, kernel='rbf', data_source=''):
    """Fit and return an SVM model for real-time monitoring.

    Args:
        c: SVM regularization cost (``C`` of ``sklearn.svm.SVC``).
        gamma: kernel coefficient.
        kernel: kernel type, e.g. ``'rbf'``.
        data_source: data-source key forwarded to the training-data loader.

    Returns:
        The fitted ``SVC`` model.
    """
    # Echo the run configuration before training starts.
    print('Training SVM model for Real Time Monitoring..')
    print('Cost: {}'.format(c))
    print('Gamma: {}'.format(gamma))
    print('Kernel: {}'.format(kernel))
    print('Data Source: {}'.format(data_source))
    for _ in range(3):
        print()

    X_train, Y_train = data_util.load_training_data(
        CONFIG.REAL_TIME_MONITORING_TRAINING_DATA_SOURCE_SUBJECT,
        CONFIG.MODEL_NAMES['real_time_monitoring_minmax_scaler'],
        source=data_source)

    classifier = SVC(C=c, kernel=kernel, gamma=gamma)
    classifier.fit(X_train, Y_train)
    return classifier
def run_training(n_estimators=50, data_source=''):
    """Fit and return a Random Forest model for real-time monitoring.

    Args:
        n_estimators: number of trees for ``RandomForestClassifier``.
        data_source: data-source key forwarded to the training-data loader.

    Returns:
        The fitted ``RandomForestClassifier`` model.
    """
    # Echo the run configuration before training starts.
    print('Training RF model for Real Time Monitoring..')
    print('Number of Estimators: {}'.format(n_estimators))
    print('Data Source: {}'.format(data_source))
    print('Training Subjects: {}'.format(
        CONFIG.REAL_TIME_MONITORING_TRAINING_DATA_SOURCE_SUBJECT))
    for _ in range(3):
        print()

    X_train, Y_train = data_util.load_training_data(
        CONFIG.REAL_TIME_MONITORING_TRAINING_DATA_SOURCE_SUBJECT,
        CONFIG.MODEL_NAMES['real_time_monitoring_minmax_scaler'],
        source=data_source,
    )

    forest = RandomForestClassifier(n_estimators=n_estimators, n_jobs=-1)
    forest.fit(X_train, Y_train)
    return forest
def run_test(training_subjects, test_subjects, n_estimators=50, data_source='', activities=None, permutate_xyz=False, show_confusion=False):
    """Train a Random Forest on ``training_subjects`` and test on ``test_subjects``.

    Leave-one-out style run: the scaler model name is derived from the
    training-subject list so each fold uses its own fitted scaler.

    Args:
        training_subjects: subject identifiers used to load training data.
        test_subjects: subject identifiers used to load held-out test data.
        n_estimators: number of trees for ``RandomForestClassifier``.
        data_source: data-source key, prefixed onto the scaler model name.
        activities: activity labels to include; forwarded to both loaders.
        permutate_xyz: forwarded to the training-data loader.
        show_confusion: when True, append the confusion matrix to ``c_matrix``.

    Returns:
        Tuple ``(accuracy, weighted F1 score)`` on the test set.
    """
    # Scaler name must be identical for train and test so both use the
    # scaler fitted on this fold's training subjects (was duplicated inline).
    scaler_name = (data_source + '_' + ''.join(training_subjects)
                   + '_rfloo2_' + CONFIG.MODEL_NAMES['minmax_scaler'])

    X_train, Y_train = data_util.load_training_data(
        training_subjects,
        scaler_name,
        source=data_source,
        activities=activities,
        permutate_xyz=permutate_xyz
    )
    X_test, Y_test = data_util.load_testing_data(
        test_subjects,
        scaler_name,
        source=data_source,
        activities=activities
    )

    print('X_train size: {}'.format(len(X_train)))
    print('X_test size: {}'.format(len(X_test)))
    print()

    model = RandomForestClassifier(n_estimators=n_estimators, n_jobs=-1,
                                   random_state=int(time.time()))
    model.fit(X_train, Y_train)
    predictions = model.predict(X_test)

    accuracy = accuracy_score(Y_test, predictions)
    fscore = f1_score(Y_test, predictions, average='weighted')

    if show_confusion:
        # NOTE(review): ``c_matrix`` is not defined in this function; it is
        # assumed to be a module-level accumulator — confirm it exists before
        # calling with show_confusion=True, otherwise this raises NameError.
        cm = confusion_matrix(Y_test, predictions)
        c_matrix.append(cm)

    return accuracy, fscore
def run_test(C=10, kernel='rbf', degree=1, gamma='auto', data_source='', permutate_xyz=False, activities=None):
    """Train an SVM on the configured subjects and evaluate it.

    Subjects come from ``CONFIG.TRAINING_DATA_SOURCE_SUBJECT`` /
    ``CONFIG.TESTING_DATA_SOURCE_SUBJECT``; the scaler model name embeds the
    training-subject list so test data is scaled with the same fitted scaler.

    Args:
        C, kernel, degree, gamma: passed through to ``sklearn.svm.SVC``.
        data_source: data-source key, prefixed onto the scaler model name.
        permutate_xyz: forwarded to the training-data loader.
        activities: activity labels to include; forwarded to both loaders.

    Returns:
        Tuple ``(accuracy, weighted F1 score)`` on the test set.
    """
    # Echo the run configuration before training starts.
    print('Data Source: {}'.format(data_source))
    print('Cost: {}'.format(C))
    print('Gamma: {}'.format(gamma))
    print('Sampling Frequency: {}'.format(CONFIG.SAMPLING_FREQUENCY))
    print('Window Size: {}'.format(CONFIG.WINDOW_SIZE))
    print('Training Subjects: {}'.format(CONFIG.TRAINING_DATA_SOURCE_SUBJECT))
    print('Testing Subjects: {}'.format(CONFIG.TESTING_DATA_SOURCE_SUBJECT))

    # Single scaler name shared by both loaders (was duplicated inline).
    scaler_name = (data_source + '_'
                   + ''.join(CONFIG.TRAINING_DATA_SOURCE_SUBJECT)
                   + '_svmloo_' + CONFIG.MODEL_NAMES['minmax_scaler'])

    X_train, Y_train = data_util.load_training_data(
        CONFIG.TRAINING_DATA_SOURCE_SUBJECT,
        scaler_name,
        source=data_source,
        activities=activities,
        permutate_xyz=permutate_xyz)
    X_test, Y_test = data_util.load_testing_data(
        CONFIG.TESTING_DATA_SOURCE_SUBJECT,
        scaler_name,
        source=data_source,
        activities=activities)

    print('X_train size: {}'.format(len(X_train)))
    print('X_test size: {}'.format(len(X_test)))
    print()

    model = SVC(C=C, kernel=kernel, gamma=gamma, degree=degree,
                random_state=int(time.time()))
    model.fit(X_train, Y_train)
    predictions = model.predict(X_test)

    accuracy = accuracy_score(Y_test, predictions)
    fscore = f1_score(Y_test, predictions, average='weighted')

    return accuracy, fscore
accuracy = accuracy_score(Y_test, predictions) fscore = f1_score(Y_test, predictions, average='weighted') print() print() print('Sampling Frequency: {}'.format(CONFIG.SAMPLING_FREQUENCY)) print('Window Size: {}'.format(CONFIG.WINDOW_SIZE)) print('Accuracy: {}'.format(accuracy)) print('F1 Score: {}'.format(fscore)) print() print('==================================================================') if __name__ == '__main__': X_train, Y_train = data_util.load_training_data( CONFIG.TRAINING_DATA_SOURCE_SUBJECT, CONFIG.MODEL_NAMES['minmax_scaler'], onehot=True) X_test, Y_test = data_util.load_testing_data( CONFIG.TESTING_DATA_SOURCE_SUBJECT, CONFIG.MODEL_NAMES['minmax_scaler'], onehot=False) alpha = [0.01] max_epoch = [2000] n_hidden_layer = [1] n_neuron_hidden_layer = [100] for a in alpha: for epoch in max_epoch: for layer in n_hidden_layer: