def test_actual_results(params):
    """Check predict_proba and predict against the precomputed expected arrays.

    A fresh 1-NN classifier is built with the parametrized keyword arguments,
    fit on the module-level (X, y) fixtures, and its outputs on X_test are
    compared elementwise to the expected ``y_proba`` and ``y`` arrays.
    """
    classifier = KNeighborsClassifier(n_neighbors=1, **params)
    classifier.fit(X, y)
    probabilities = classifier.predict_proba(X_test)
    predictions = classifier.predict(X_test)
    np.testing.assert_array_equal(probabilities, y_proba)
    np.testing.assert_array_equal(predictions, y)
# Benchmark loop: for every (dataset, warping_window) pair, load the UCR
# train/test splits and score nearest-neighbor classifiers with several
# distance metrics, accumulating error rates in error_ed_list.
#
# NOTE(review): the original had two file-path assignments BEFORE this loop
# that referenced `dataset` before it was defined (a NameError at runtime)
# and exactly duplicated the assignments inside the loop body; they were
# removed as dead, broken code.
for i, (dataset, warping_window) in enumerate(zip(dataset_list, warping_window_list)):
    print("Dataset: {}".format(dataset))

    # UCR archive layout: <PATH>/<dataset>/<dataset>_{TRAIN,TEST}.tsv
    file_train = PATH + str(dataset) + "/" + str(dataset) + "_TRAIN.tsv"
    file_test = PATH + str(dataset) + "/" + str(dataset) + "_TEST.tsv"

    # First column is the class label; remaining columns are the time series.
    train = np.genfromtxt(fname=file_train, delimiter="\t", skip_header=0)
    test = np.genfromtxt(fname=file_test, delimiter="\t", skip_header=0)
    X_train, y_train = train[:, 1:], train[:, 0]
    X_test, y_test = test[:, 1:], test[:, 0]

    # 1-NN classifiers under three distance measures.
    clf_ed = KNeighborsClassifier(metric='euclidean')
    clf_dtw = KNeighborsClassifier(metric='dtw')
    clf_dtw_w = KNeighborsClassifier(metric='dtw_sakoechiba',
                                     metric_params={'window_size': warping_window})
    # NOTE(review): `transformer` and `knn` are created but never used within
    # this chunk -- presumably consumed further down the file; verify before
    # removing them.
    transformer = FunctionTransformer(func=lambda x: x.toarray(),
                                      validate=False, check_inverse=False)
    knn = KNeighborsClassifier(n_neighbors=1, metric='boss')

    # Euclidean Distance: error rate = 1 - accuracy on the test split.
    error_ed = 1 - clf_ed.fit(X_train, y_train).score(X_test, y_test)
    print('Accuracy ED: ', 1 - error_ed)
    print("Error rate with Euclidean Distance: {0:.4f}".format(error_ed))
    error_ed_list.append(error_ed)
# Earlier variant of the data loading (UCR files without the .tsv suffix),
# kept commented out by the original author:
# file_train = PATH + str(dataset) + "/" + str(dataset) + "_TRAIN"
# file_test = PATH + str(dataset) + "/" + str(dataset) + "_TEST"
# train = np.genfromtxt(fname=file_train, delimiter="\t", skip_header=0)
# test = np.genfromtxt(fname=file_test, delimiter="\t", skip_header=0)
# X_train, y_train = train[:, 1:], train[:, 0]
# X_test, y_test = test[:, 1:], test[:, 0]

# Hard-coded local paths.  NOTE(review): machine-specific; consider moving
# these into configuration.
direc = '/Users/apple/Desktop/dev/projectlife/data/UCR'
summaries_dir = '/Users/apple/Desktop/dev/projectlife/data/logs'

# Load the data.
X_train, X_test, y_train, y_test = load_data(direc, dataset='Projectlife')

# 1-NN classifiers under three distance measures.
clf_ed = KNeighborsClassifier(metric='euclidean')
clf_dtw = KNeighborsClassifier(metric='dtw')
clf_dtw_w = KNeighborsClassifier(
    metric='dtw_sakoechiba',
    metric_params={'window_size': warping_window})

# Euclidean Distance: error rate = 1 - accuracy on the test split.
error_ed = 1 - clf_ed.fit(X_train, y_train).score(X_test, y_test)
print("Error rate with Euclidean Distance: {0:.4f}".format(error_ed))
error_ed_list.append(error_ed)

# Dynamic Time Warping: same scoring with the unconstrained DTW metric.
error_dtw = 1 - clf_dtw.fit(X_train, y_train).score(X_test, y_test)
print("Error rate with Dynamic Time Warping: {0:.4f}".format(error_dtw))
error_dtw_list.append(error_dtw)
print(clf_dtw.predict(X_test))
# --- DTW (Sakoe-Chiba) feature construction and classifier comparison ---
# NOTE(review): the dtw(...) call and the append just below reference loop
# indices i and j, so they appear to run once per (i, j) sample pair inside
# loops whose headers are outside this chunk; the np.array/resize statements
# then look like post-loop code.  Confirm the loop structure against the
# full file before restructuring.
dtw_sakoechiba, path_sakoechiba = dtw(X_train[i], X_train[j], dist='square', method='sakoechiba',options={'window_size': warping_window}, return_path=True)
DTW_sakoe_train.append(dtw_sakoechiba)
# Reshape the accumulated per-pair distances into 2-D feature matrices with
# one row per sample.
DTW_sakoe_train = np.array(DTW_sakoe_train)
DTW_sakoe_train.resize(y_train.shape[0],int(len(DTW_sakoe_train)/y_train.shape[0]))
DTW_sakoe_test = np.array(DTW_sakoe_test)
DTW_sakoe_test.resize(y_test.shape[0],int(len(DTW_sakoe_test)/y_test.shape[0]))
# Record error rates (1 - accuracy) of clf and gpc on the classic-DTW and
# Sakoe-Chiba distance features.
app.append(1-clf.fit(DTW_Classic_train, y_train).score(DTW_Classic_test,y_test))
app.append(1-gpc.fit(DTW_Classic_train, y_train).score(DTW_Classic_test,y_test))
app.append(1-clf.fit(DTW_sakoe_train, y_train).score(DTW_sakoe_test,y_test))
app.append(1-gpc.fit(DTW_sakoe_train, y_train).score(DTW_sakoe_test,y_test))
print('1NN started')
# 1-NN with the Sakoe-Chiba-constrained DTW metric on the raw series.
clf_dtw_w = KNeighborsClassifier(metric='dtw_sakoechiba',metric_params={'window_size': warping_window})
error_dtw_w = 1- clf_dtw_w.fit(X_train, y_train).score(X_test, y_test)
app.append(error_dtw_w)
print('DTW-DTWR started')
# NOTE(review): imports placed mid-file in the original; PEP 8 wants them at
# the top of the module.  svm and TimeSeriesScalerMeanVariance are not used
# in this chunk -- verify against the rest of the file before relocating or
# removing them.
from sklearn import svm
from tslearn.preprocessing import TimeSeriesScalerMeanVariance
from pyts.metrics import dtw
from pyts.metrics.dtw import (cost_matrix, accumulated_cost_matrix, _return_path, _multiscale_region)
# Combined DTW + DTWR design matrices.
# NOTE(review): the column order differs between splits -- test concatenates
# (sakoe, classic) but train concatenates (classic, sakoe), which misaligns
# features between train and test.  Looks like a bug; confirm intent.
test_DTW_DTWR = np.concatenate((DTW_sakoe_test,DTW_Classic_test),axis=1)
train_DTW_DTWR = np.concatenate((DTW_Classic_train,DTW_sakoe_train),axis=1)