for s in sigma:
    for a in alpha:
        # Sharpen test data with various sigma (for Gaussian filter)
        # and alpha value combinations
        X_test_sharpen = process_data.sharpen(X, s, a)
        pred_dyna_sharpen = model.predict(np.expand_dims(X_test_sharpen, axis=2),
                                          batch_size=32)
        print ">>> sigma={}, alpha={:.2f}".format(s, a)
        print accuracy_score(y, np.argmax(pred_dyna_sharpen, axis=1))
        print confusion_matrix(y, np.argmax(pred_dyna_sharpen, axis=1))

# Load all test data (* dynamic and static data are mixed.)
X_test = process_data.load_x("test")
y_test = process_data.load_y("test")

# Set seed to ensure reproducibility of the paper.
seed = 818

# Static (4-sitting, 5-standing, 6-laying) test data are selected and
# split in two, first & second, in order to determine
# sigma & alpha values for test data sharpening.
random.seed(seed)
stat_1 = np.where(y_test == 4)[0]
stat_1_first = random.sample(stat_1, int(len(stat_1) * 0.5))
stat_1_second = list(set(stat_1) - set(stat_1_first))
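# The sharpening loop above relies on process_data.sharpen(X, s, a), which is not
# shown in this excerpt. Below is a minimal sketch of Gaussian-filter-based
# unsharp masking, the sharpening scheme the paper describes; the actual
# process_data.sharpen implementation may differ in its details, and
# sharpen_sketch is an illustrative name, not the repository's function.
import numpy as np
from scipy.ndimage import gaussian_filter1d

def sharpen_sketch(X, sigma, alpha):
    # Blur each sample (one sample per row) with a 1D Gaussian of width sigma,
    # then add back alpha times the removed high-frequency detail.
    blurred = gaussian_filter1d(X, sigma=sigma, axis=1)
    return X + alpha * (X - blurred)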
import numpy as np
import keras.backend as K
import process_data

'''
See paper:
Sensors 2018, 18(4), 1055; https://doi.org/10.3390/s18041055
"Divide and Conquer-Based 1D CNN Human Activity Recognition Using Test Data Sharpening"
by Heeryon Cho & Sang Min Yoon

This code learns the dynamic HAR model given in Figure 10.
(Sensors 2018, 18(4), 1055, page 13 of 24)
'''

# Load all train and test data (* dynamic and static data are mixed.)
X_train_all = process_data.load_x("train")  # at this stage, the data includes both dynamic and static HAR data
y_train_all = process_data.load_y("train")
X_test_all = process_data.load_x("test")
y_test_all = process_data.load_y("test")

# --------------------------------------
# Only dynamic HAR data are selected
# --------------------------------------

# Select dynamic HAR train data (1-walking, 2-walking upstairs, 3-walking downstairs)
dynamic_1 = np.where(y_train_all == 1)[0]
dynamic_2 = np.where(y_train_all == 2)[0]
dynamic_3 = np.where(y_train_all == 3)[0]
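# A hedged sketch of a possible next step: the three sets of dynamic indices
# could be combined to form the dynamic-only training set. The variable names
# and the label shift below are illustrative assumptions, not taken from the
# original script.
dynamic_idx = np.sort(np.concatenate([dynamic_1, dynamic_2, dynamic_3]))
X_train_dyna = X_train_all[dynamic_idx]
y_train_dyna = y_train_all[dynamic_idx] - 1  # shift labels 1..3 to 0..2 before one-hot encoding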
import process_data
from sklearn.linear_model import LogisticRegression
from sklearn.svm import SVC
from sklearn.neural_network import MLPClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import accuracy_score
from sklearn.metrics import confusion_matrix

# Load all train and test data (* dynamic and static data are mixed.)
X_train = process_data.load_x("train")
y_train = process_data.load_y("train")
X_test = process_data.load_x("test")
y_test = process_data.load_y("test")

print "=================================="
print " ACCURACY OF OTHER ML CLASSIFIERS"
print "=================================="

# Build a logistic regression classifier and predict
clf_lr = LogisticRegression(random_state=0)
clf_lr.fit(X_train, y_train)
pred_lr = clf_lr.predict(X_test)
print "\n--- Logistic Regression Classifier ---"
print accuracy_score(y_test, pred_lr)
print confusion_matrix(y_test, pred_lr)
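# The excerpt imports SVC, MLPClassifier, and DecisionTreeClassifier but only
# shows the logistic regression run; a hedged sketch of the analogous
# evaluation for the remaining classifiers follows (the hyperparameters are
# illustrative defaults, not values from the paper).
for name, clf in [("Support Vector Machine", SVC(random_state=0)),
                  ("Multilayer Perceptron", MLPClassifier(random_state=0)),
                  ("Decision Tree", DecisionTreeClassifier(random_state=0))]:
    clf.fit(X_train, y_train)
    pred = clf.predict(X_test)
    print "\n--- {} Classifier ---".format(name)
    print accuracy_score(y_test, pred)
    print confusion_matrix(y_test, pred)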