def start_predict_offline():
    # Validate the selected test file and model (Text.get() returns a string, so
    # compare against an empty string rather than an empty list).
    if file_text.get("1.0", END).strip() == "":
        messagebox.showerror("Error", "Please select test file!")
        return
    test_file = file_text.get("1.0", END).split("\n")[0]

    if model_text.get("1.0", END).strip() == "":
        messagebox.showerror("Error", "Please select test model!")
        return
    offline_model = model_text.get("1.0", END).split("\n")[0]

    # Load the preprocessing/feature configuration saved next to the model (<model>.txt).
    test_config = pd.read_csv(offline_model.split(".")[0] + ".txt", sep=" ")
    test_config.columns = ['param', 'value']
    test_config = pd.DataFrame(data=[test_config['value'].values], columns=test_config['param'].values)

    if test_config['filter'].values[0] == '1':
        filter_order = int(test_config['order'].values[0])
        filter_fc = test_config['fc'].values[0]
        filter_type = test_config['type'].values[0]
        filter_fs = int(test_config['fs'].values[0])
        if filter_type == "bandpass" or filter_type == "bandstop":
            fc = filter_fc.split("[")[1].split("]")[0].split(",")
            filter_fc = [int(fc[0]), int(fc[1])]
        else:
            filter_fc = int(filter_fc)
    if test_config['smooth'].values[0] == '1':
        smooth_wl = int(test_config['windowlength'].values[0])
        smooth_po = int(test_config['polyorder'].values[0])
        smooth_mode = test_config['mode'].values[0]
    if test_config['eliminate'].values[0] == '1':
        elim_thre = int(test_config['threshold'].values[0])
        elim_ws = int(test_config['windowsize'].values[0])
        elim_base = int(test_config['baseon'].values[0])
    if test_config['energy'].values[0] == '1':
        eng_bs = int(test_config['bandsize'].values[0])
        eng_odr = int(test_config['odr'].values[0])
    seg_size = int(test_config['segsize'].values[0])

    files = pd.read_csv(test_file)
    final_features = []
    labels = []
    for idx, row in files.iterrows():
        name = row['file']
        dir = row['dir']
        label = row['label']
        header = row['header']
        raw_data = pd.read_csv(dir + '\\' + name, header=header, delimiter=';')
        ax = raw_data['ax']
        ay = raw_data['ay']
        az = raw_data['az']
        data = np.array([ax, ay, az]).T

        # Apply the same preprocessing that was used at training time.
        if test_config['filter'].values[0] == '1':
            data = data_preprocessing.filter(data, filter_order, filter_fc, filter_type, filter_fs)
        if test_config['smooth'].values[0] == '1':
            smoothed_data = data_preprocessing.smooth(data, smooth_wl, smooth_po, smooth_mode)
            if test_config['eliminate'].values[0] == '1':
                new_smoothed_data, data = data_preprocessing.eliminate_abnormal_value(
                    smoothed_data, data, elim_ws, elim_thre, elim_base)
            else:
                data = smoothed_data

        # Number of statistical feature columns per window (energy is appended separately).
        num = (int(test_config['mean'].values[0]) + int(test_config['std'].values[0])
               + int(test_config['min'].values[0]) + int(test_config['max'].values[0])
               + int(test_config['rms'].values[0]))

        # Extract features from the x, y and z axes respectively.
        for col in range(3):
            feature = np.empty([int(len(data[:, col]) / seg_size), num])
            new_feature = []
            for i in range(int(len(data[:, col]) / seg_size)):
                window = data[i * seg_size:(i + 1) * seg_size, col]
                k = 0
                if int(test_config['mean'].values[0]):
                    feature[i, k] = feature_extraction.mean(window)
                    k += 1
                if int(test_config['std'].values[0]):
                    feature[i, k] = feature_extraction.std(window)
                    k += 1
                if int(test_config['min'].values[0]):
                    feature[i, k] = feature_extraction.getmin(window)
                    k += 1
                if int(test_config['max'].values[0]):
                    feature[i, k] = feature_extraction.getmax(window)
                    k += 1
                if int(test_config['rms'].values[0]):
                    feature[i, k] = feature_extraction.rms(window)
                    k += 1
                if int(test_config['energy'].values[0]):
                    energy_vector = feature_extraction.energy_for_each_freq_band(window, eng_odr, eng_bs)
                    temp = np.append(feature[i, :], energy_vector)
                    new_feature.append(temp)
                else:
                    new_feature.append(feature[i, :])
            new_feature = np.array(new_feature)

            # Collect the features of all three axes into ex_feature.
            if col == 0:
                ex_feature = new_feature
            else:
                ex_feature = np.append(ex_feature, new_feature, axis=1)

        # Keep only the columns of the axes that were selected at training time.
        num_features = ex_feature.shape[1] // 3
        num_axis = (int(test_config['X'].values[0]) + int(test_config['Y'].values[0])
                    + int(test_config['Z'].values[0]))
        trn_feature = np.empty([ex_feature.shape[0], num_axis * num_features])
        k = 0
        if int(test_config['X'].values[0]) == 1:
            trn_feature[:, k * num_features:(k + 1) * num_features] = ex_feature[:, 0:num_features]
            k += 1
        if int(test_config['Y'].values[0]) == 1:
            trn_feature[:, k * num_features:(k + 1) * num_features] = ex_feature[:, num_features:num_features * 2]
            k += 1
        if int(test_config['Z'].values[0]) == 1:
            trn_feature[:, k * num_features:(k + 1) * num_features] = ex_feature[:, num_features * 2:num_features * 3]
            k += 1

        final_features.extend(trn_feature)
        for i in range(len(trn_feature)):
            labels.append(label)

    final_features = np.array(final_features)
    labels = np.array(labels)

    with open(offline_model, 'rb') as model_file:
        test_model = pickle.load(model_file)
    pred = test_model.predict(final_features)
    acc_tst = accuracy_score(labels, pred)
    lb_acc_tst = Label(offline, text=acc_tst, font=("Helvetica", "12", "bold italic"))
    lb_acc_tst.place(relx=0.6, rely=0.28)

def startTrain():
    model = en_model.get()
    if model == "":
        messagebox.showerror("Error", "Please select the training model!")
        return
    trn_ratio = var_ratio.get()
    file_idx = trn_files.curselection()
    filename = ""
    if file_idx == ():
        messagebox.showerror("Error", "Please select the training files!")
        return
    else:
        filename = trn_files.get(file_idx)

    # Detect if a filter is used and pass all filter parameters to the function
    if var_filter.get() == 1:
        if en_order.get() != "" and en_fc.get() != "" and en_type.get() != "" and en_fs.get() != "":
            filter_order = int(en_order.get())
            filter_fc = en_fc.get()
            filter_type = en_type.get()
            filter_fs = int(en_fs.get())
            if filter_type == "bandpass" or filter_type == "bandstop":
                if filter_fc.split("[")[0] != "":
                    messagebox.showerror("Error", "The form of fc should be like \'[fc1, fc2]\'!")
                    return
                fc = filter_fc.split("[")[1].split("]")[0].split(",")
                filter_fc = [int(fc[0]), int(fc[1])]
            else:
                filter_fc = int(filter_fc)
        else:
            messagebox.showerror("Error", "Please set all the parameters of Filter!")
            return

    # Detect if smoothing is used and pass all smoothing parameters to the function
    if var_smooth.get() == 1:
        if en_wl.get() != "" and en_po.get() != "" and en_mode.get() != "":
            smooth_wl = int(en_wl.get())
            smooth_po = int(en_po.get())
            smooth_mode = en_mode.get()
            if smooth_wl % 2 == 0:
                messagebox.showerror("Error", "The window length should be odd!")
                return
        else:
            messagebox.showerror("Error", "Please set all the parameters of Smoothing!")
            return
    if var_elim.get() == 1:
        if en_thre.get() != "" and en_win.get() != "":
            elim_thre = int(en_thre.get())
            elim_ws = int(en_win.get())
        else:
            messagebox.showerror("Error", "Please set all the parameters of Eliminate Abnormal Data!")
            return
    if var_energy.get() == 1:
        if en_bs.get() != "" and en_odr.get() != "":
            eng_bs = int(en_bs.get())
            eng_odr = int(en_odr.get())
        else:
            messagebox.showerror("Error", "Please set all the parameters of Energy!")
            return
    if en_ws.get() != "":
        seg_size = int(en_ws.get())
    else:
        messagebox.showerror("Error", "Please set the segment size!")
        return
    if var_accel_x.get() == 0 and var_accel_y.get() == 0 and var_accel_z.get() == 0:
        messagebox.showerror("Error", "Please select at least one axis!")
        return
    if (var_base.get() == 1 and var_accel_x.get() == 0) or (var_base.get() == 2 and var_accel_y.get() == 0) or (var_base.get() == 3 and var_accel_z.get() == 0):
        messagebox.showerror("Error", "The 'base on' axis must also be selected in Axis Selection!")
        return
    if var_mean.get() + var_std.get() + var_min.get() + var_max.get() + var_rms.get() + var_energy.get() == 0:
        messagebox.showerror("Error", "Please select at least one feature!")
        return

    save_model = filedialog.asksaveasfilename(initialdir="/", title="Save Model",
                                              filetypes=(("sav files", "*.sav"), ("all files", "*.*")))
    if save_model == "":
        return
    if len(save_model.split(".")) == 1:
        save_model = save_model + '.sav'

    ### write model configuration
    parts = save_model.split('/')
    directory = '/'.join(parts[0:len(parts) - 1])
    txtName = parts[len(parts) - 1].split('.')[0] + '.txt'
    pb['value'] = 10
    trn.update_idletasks()
    time.sleep(0.5)
    f = open(directory + '/' + txtName, "w+")
    f.write("param value\n")
    if var_filter.get() == 1:
        f.write("filter 1\n")
        f.write("order %d\n" % filter_order)
        # fc is a [low, high] pair for band filters, a single cut-off otherwise;
        # write it in the same form the predict functions parse it back.
        if isinstance(filter_fc, list):
            f.write("fc [%d,%d]\n" % (filter_fc[0], filter_fc[1]))
        else:
            f.write("fc %d\n" % filter_fc)
        f.write("type " + filter_type + "\n")
        f.write("fs %d\n" % filter_fs)
    else:
        f.write("filter 0\n")
    if var_smooth.get() == 1:
        f.write("smooth 1\n")
        f.write("windowlength %d\n" % smooth_wl)
        f.write("polyorder %d\n" % smooth_po)
        f.write("mode " + smooth_mode + "\n")
    else:
        f.write("smooth 0\n")
    if var_elim.get() == 1:
        f.write("eliminate 1\n")
        f.write("threshold %d\n" % elim_thre)
        f.write("windowsize %d\n" % elim_ws)
        f.write("baseon %d\n" % var_base.get())
    else:
        f.write("eliminate 0\n")
    f.write("X %d\n" % var_accel_x.get())
    f.write("Y %d\n" % var_accel_y.get())
    f.write("Z %d\n" % var_accel_z.get())
    f.write("mean %d\n" % var_mean.get())
    f.write("std %d\n" % var_std.get())
    f.write("min %d\n" % var_min.get())
    f.write("max %d\n" % var_max.get())
    f.write("rms %d\n" % var_rms.get())
    f.write("energy %d\n" % var_energy.get())
    if var_energy.get() == 1:
        f.write("bandsize %d\n" % eng_bs)
        f.write("odr %d\n" % eng_odr)
    f.write("segsize %d\n" % seg_size)
    f.close()

    files = pd.read_csv(filename)
    final_features = []
    labels = []
    num_rows = files.shape[0]
    for idx, row in files.iterrows():
        name = row['file']
        dir = row['dir']
        label = row['label']
        header = row['header']
        raw_data = pd.read_csv(dir + '\\' + name, header=header, delimiter=';')
        ax = raw_data['ax']
        ay = raw_data['ay']
        az = raw_data['az']
        data = np.array([ax, ay, az]).T

        if var_filter.get() == 1:
            data = data_preprocessing.filter(data, filter_order, filter_fc, filter_type, filter_fs)
        if var_smooth.get() == 1:
            smoothed_data = data_preprocessing.smooth(data, smooth_wl, smooth_po, smooth_mode)
            if var_elim.get() == 1:
                new_smoothed_data, data = data_preprocessing.eliminate_abnormal_value(
                    smoothed_data, data, elim_ws, elim_thre, var_base.get())
            else:
                data = smoothed_data

        num = var_mean.get() + var_std.get() + var_min.get() + var_max.get() + var_rms.get()
        # extract features from the x, y and z axes respectively
        for col in range(3):
            # the number of feature columns is determined by the selected features
            feature = np.empty([int(len(data[:, col]) / seg_size), num])
            new_feature = []
            for i in range(int(len(data[:, col]) / seg_size)):
                # window is one group of data whose size is seg_size
                window = data[i * seg_size:(i + 1) * seg_size, col]
                # keep track of the feature column index
                k = 0
                if var_mean.get():
                    feature[i, k] = feature_extraction.mean(window)
                    k += 1
                if var_std.get():
                    feature[i, k] = feature_extraction.std(window)
                    k += 1
                if var_min.get():
                    feature[i, k] = feature_extraction.getmin(window)
                    k += 1
                if var_max.get():
                    feature[i, k] = feature_extraction.getmax(window)
                    k += 1
                if var_rms.get():
                    feature[i, k] = feature_extraction.rms(window)
                    k += 1
                if var_energy.get():
                    energy_vector = feature_extraction.energy_for_each_freq_band(window, eng_odr, eng_bs)
                    temp = np.append(feature[i, :], energy_vector)
                    new_feature.append(temp)
                else:
                    new_feature.append(feature[i, :])
            # features of one axis only
            new_feature = np.array(new_feature)
            # save features of all axes to ex_feature
            if col == 0:
                ex_feature = new_feature
            else:
                ex_feature = np.append(ex_feature, new_feature, axis=1)

        num_features = ex_feature.shape[1] // 3
        num_axis = var_accel_x.get() + var_accel_y.get() + var_accel_z.get()
        trn_feature = np.empty([ex_feature.shape[0], num_axis * num_features])
        k = 0
        if var_accel_x.get() == 1:
            trn_feature[:, k * num_features:(k + 1) * num_features] = ex_feature[:, 0:num_features]
            k += 1
        if var_accel_y.get() == 1:
            trn_feature[:, k * num_features:(k + 1) * num_features] = ex_feature[:, num_features:num_features * 2]
            k += 1
        if var_accel_z.get() == 1:
            trn_feature[:, k * num_features:(k + 1) * num_features] = ex_feature[:, num_features * 2:num_features * 3]
            k += 1

        final_features.extend(trn_feature)
        for i in range(len(trn_feature)):
            labels.append(label)
        pb['value'] = 10 + (idx + 1) / num_rows * 80
        trn.update_idletasks()
        time.sleep(0.5)

    final_features = np.array(final_features)
    labels = np.array(labels)

    # Clear any previous results before inserting the new ones.
    if res_text.get('1.0', END).strip() != "":
        res_text.delete('1.0', END)
    if model == "Random Forest":
        acc_trn, acc_tst, acc_oob = train.RandomForest(final_features, labels, trn_ratio, save_model)
        res_text.insert(END, "Training accuracy: " + str(acc_trn) + '\n')
        res_text.insert(END, '\n' + "Testing accuracy: " + str(acc_tst) + '\n')
        res_text.insert(END, '\n' + "Out of bag accuracy: " + str(acc_oob) + '\n')
    if model == "SVM":
        acc_trn, acc_tst = train.SVM(final_features, labels, trn_ratio, save_model)
        res_text.insert(END, "Training accuracy: " + str(acc_trn) + '\n')
        res_text.insert(END, '\n' + "Test accuracy: " + str(acc_tst) + '\n')
    pb['value'] = 100
    trn.update_idletasks()
    time.sleep(0.5)

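# For reference: the configuration file written by startTrain() is the
# space-separated two-column table that start_predict_offline() and
# start_predict_online() read back with pd.read_csv(..., sep=" ").
# An illustrative example (the values below are made up):
#
#   param value
#   filter 1
#   order 4
#   fc 20
#   type lowpass
#   fs 400
#   smooth 1
#   windowlength 11
#   polyorder 3
#   mode nearest
#   eliminate 0
#   X 1
#   Y 1
#   Z 1
#   mean 1
#   std 1
#   min 0
#   max 0
#   rms 1
#   energy 0
#   segsize 200
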
def plotFigures():
    file_idx = trn_files.curselection()
    filename = ""
    if file_idx == ():
        messagebox.showerror("Error", "Please select the training files!")
        return
    else:
        filename = trn_files.get(file_idx)

    if var_filter.get() == 1:
        if en_order.get() != "" and en_fc.get() != "" and en_type.get() != "" and en_fs.get() != "":
            filter_order = int(en_order.get())
            filter_fc = en_fc.get()
            filter_type = en_type.get()
            filter_fs = int(en_fs.get())
            if filter_type == "bandpass" or filter_type == "bandstop":
                if filter_fc.split("[")[0] != "":
                    messagebox.showerror("Error", "The form of fc should be like \'[fc1, fc2]\'!")
                    return
                fc = filter_fc.split("[")[1].split("]")[0].split(",")
                filter_fc = [int(fc[0]), int(fc[1])]
            else:
                filter_fc = int(filter_fc)
        else:
            messagebox.showerror("Error", "Please set all the parameters of Filter!")
            return
    if var_smooth.get() == 1:
        if en_wl.get() != "" and en_po.get() != "" and en_mode.get() != "":
            smooth_wl = int(en_wl.get())
            smooth_po = int(en_po.get())
            smooth_mode = en_mode.get()
            if smooth_wl % 2 == 0:
                messagebox.showerror("Error", "The window length should be odd!")
                return
        else:
            messagebox.showerror("Error", "Please set all the parameters of Smoothing!")
            return
    if var_elim.get() == 1:
        if en_thre.get() != "" and en_win.get() != "":
            elim_thre = int(en_thre.get())
            elim_ws = int(en_win.get())
        else:
            messagebox.showerror("Error", "Please set all the parameters of Eliminate Abnormal Data!")
            return

    files = pd.read_csv(filename)
    num_rows = files.shape[0]
    for idx, row in files.iterrows():
        name = row['file']
        dir = row['dir']
        label = row['label']
        header = row['header']
        raw_data = pd.read_csv(dir + '\\' + name, header=header, delimiter=';')
        ax = raw_data['ax']
        ay = raw_data['ay']
        az = raw_data['az']
        data = np.array([ax, ay, az]).T

        top = Toplevel(root)
        top.title("Figure " + str(idx + 1) + " / " + str(num_rows))
        f = Figure(figsize=(14, 8), dpi=100)
        # one subplot for the raw data plus one per enabled preprocessing step
        # (elimination contributes two subplots)
        sub = var_filter.get() + var_smooth.get() + var_elim.get() + 1
        if var_elim.get() == 1:
            sub += 1
        axes = f.subplots(sub, 1)

        if sub == 1:
            axes.plot(data[:, 0], label='raw ax - ' + label, alpha=0.8)
            axes.plot(data[:, 1], label='raw ay - ' + label, alpha=0.8)
            axes.plot(data[:, 2], label='raw az - ' + label, alpha=0.8)
            axes.legend()
            axes.grid()
        else:
            axes[0].plot(data[:, 0], label='raw ax - ' + label, alpha=0.8)
            axes[0].plot(data[:, 1], label='raw ay - ' + label, alpha=0.8)
            axes[0].plot(data[:, 2], label='raw az - ' + label, alpha=0.8)
            axes[0].legend()
            axes[0].grid()

        index = 1
        if var_filter.get() == 1:
            data = data_preprocessing.filter(data, filter_order, filter_fc, filter_type, filter_fs)
            axes[index].plot(data[:, 0], label='filtered ax - ' + label, alpha=0.8)
            axes[index].plot(data[:, 1], label='filtered ay - ' + label, alpha=0.8)
            axes[index].plot(data[:, 2], label='filtered az - ' + label, alpha=0.8)
            axes[index].legend()
            axes[index].grid()
            index += 1
        if var_smooth.get() == 1:
            smoothed_data = data_preprocessing.smooth(data, smooth_wl, smooth_po, smooth_mode)
            axes[index].plot(smoothed_data[:, 0], label='smoothed ax - ' + label, alpha=0.8)
            axes[index].plot(smoothed_data[:, 1], label='smoothed ay - ' + label, alpha=0.8)
            axes[index].plot(smoothed_data[:, 2], label='smoothed az - ' + label, alpha=0.8)
            axes[index].legend()
            axes[index].grid()
            axes[index].set_ylim((-6000, 6000))
            index += 1
        if var_elim.get() == 1:
            new_smoothed_data, data = data_preprocessing.eliminate_abnormal_value(
                smoothed_data, data, elim_ws, elim_thre, var_base.get())
            axes[index].plot(data[:, 0], label='after eliminate ax - ' + label, alpha=0.8)
            axes[index].plot(data[:, 1], label='after eliminate ay - ' + label, alpha=0.8)
            axes[index].plot(data[:, 2], label='after eliminate az - ' + label, alpha=0.8)
            axes[index].legend()
            axes[index].grid()
            index += 1
            axes[index].plot(new_smoothed_data[:, 0], label='after eliminate smoothed ax - ' + label, alpha=0.8)
            axes[index].plot(new_smoothed_data[:, 1], label='after eliminate smoothed ay - ' + label, alpha=0.8)
            axes[index].plot(new_smoothed_data[:, 2], label='after eliminate smoothed az - ' + label, alpha=0.8)
            axes[index].legend()
            axes[index].grid()
            index += 1

        canvas_fig = FigureCanvasTkAgg(f, top)
        canvas_fig.draw()
        canvas_fig.get_tk_widget().pack(side=BOTTOM, fill=BOTH, expand=True)
        toolbar = NavigationToolbar2Tk(canvas_fig, top)
        toolbar.update()
        canvas_fig._tkcanvas.pack(side=TOP, fill=BOTH, expand=True)

def extractAndSave():
    file_idx = trn_files.curselection()
    filename = ""
    if file_idx == ():
        messagebox.showerror("Error", "Please select the training files!")
        return
    else:
        filename = trn_files.get(file_idx)

    if var_filter.get() == 1:
        if en_order.get() != "" and en_fc.get() != "" and en_type.get() != "" and en_fs.get() != "":
            filter_order = int(en_order.get())
            filter_fc = en_fc.get()
            filter_type = en_type.get()
            filter_fs = int(en_fs.get())
            if filter_type == "bandpass" or filter_type == "bandstop":
                if filter_fc.split("[")[0] != "":
                    messagebox.showerror("Error", "The form of fc should be like \'[fc1, fc2]\'!")
                    return
                fc = filter_fc.split("[")[1].split("]")[0].split(",")
                filter_fc = [int(fc[0]), int(fc[1])]
            else:
                filter_fc = int(filter_fc)
        else:
            messagebox.showerror("Error", "Please set all the parameters of Filter!")
            return
    if var_smooth.get() == 1:
        if en_wl.get() != "" and en_po.get() != "" and en_mode.get() != "":
            smooth_wl = int(en_wl.get())
            smooth_po = int(en_po.get())
            smooth_mode = en_mode.get()
            if smooth_wl % 2 == 0:
                messagebox.showerror("Error", "The window length should be odd!")
                return
        else:
            messagebox.showerror("Error", "Please set all the parameters of Smoothing!")
            return
    if var_elim.get() == 1:
        if en_thre.get() != "" and en_win.get() != "":
            elim_thre = int(en_thre.get())
            elim_ws = int(en_win.get())
        else:
            messagebox.showerror("Error", "Please set all the parameters of Eliminate Abnormal Data!")
            return
    if var_energy.get() == 1:
        if en_bs.get() != "" and en_odr.get() != "":
            eng_bs = int(en_bs.get())
            eng_odr = int(en_odr.get())
        else:
            messagebox.showerror("Error", "Please set all the parameters of Energy!")
            return
    if en_ws.get() != "":
        seg_size = int(en_ws.get())
    else:
        messagebox.showerror("Error", "Please set the segment size!")
        return
    if var_accel_x.get() == 0 and var_accel_y.get() == 0 and var_accel_z.get() == 0:
        messagebox.showerror("Error", "Please select at least one axis!")
        return
    if (var_base.get() == 1 and var_accel_x.get() == 0) or (var_base.get() == 2 and var_accel_y.get() == 0) or (var_base.get() == 3 and var_accel_z.get() == 0):
        messagebox.showerror("Error", "The 'base on' axis must also be selected in Axis Selection!")
        return
    if var_mean.get() + var_std.get() + var_min.get() + var_max.get() + var_rms.get() + var_energy.get() == 0:
        messagebox.showerror("Error", "Please select at least one feature!")
        return

    save_features = filedialog.asksaveasfilename(initialdir="/", title="Save file",
                                                 filetypes=(("numpy files", "*.npy"), ("all files", "*.*")))
    if save_features == "":
        return

    files = pd.read_csv(filename)
    final_features = []
    labels = []
    for idx, row in files.iterrows():
        name = row['file']
        dir = row['dir']
        label = row['label']
        header = row['header']
        raw_data = pd.read_csv(dir + '\\' + name, header=header, delimiter=';')
        ax = raw_data['ax']
        ay = raw_data['ay']
        az = raw_data['az']
        data = np.array([ax, ay, az]).T

        if var_filter.get() == 1:
            data = data_preprocessing.filter(data, filter_order, filter_fc, filter_type, filter_fs)
        if var_smooth.get() == 1:
            smoothed_data = data_preprocessing.smooth(data, smooth_wl, smooth_po, smooth_mode)
            if var_elim.get() == 1:
                new_smoothed_data, data = data_preprocessing.eliminate_abnormal_value(
                    smoothed_data, data, elim_ws, elim_thre, var_base.get())
            else:
                data = smoothed_data

        num = var_mean.get() + var_std.get() + var_min.get() + var_max.get() + var_rms.get()
        # extract features from the x, y and z axes respectively
        for col in range(3):
            feature = np.empty([int(len(data[:, col]) / seg_size), num])
            new_feature = []
            for i in range(int(len(data[:, col]) / seg_size)):
                window = data[i * seg_size:(i + 1) * seg_size, col]
                k = 0
                if var_mean.get():
                    feature[i, k] = feature_extraction.mean(window)
                    k += 1
                if var_std.get():
                    feature[i, k] = feature_extraction.std(window)
                    k += 1
                if var_min.get():
                    feature[i, k] = feature_extraction.getmin(window)
                    k += 1
                if var_max.get():
                    feature[i, k] = feature_extraction.getmax(window)
                    k += 1
                if var_rms.get():
                    feature[i, k] = feature_extraction.rms(window)
                    k += 1
                if var_energy.get():
                    energy_vector = feature_extraction.energy_for_each_freq_band(window, eng_odr, eng_bs)
                    temp = np.append(feature[i, :], energy_vector)
                    new_feature.append(temp)
                else:
                    new_feature.append(feature[i, :])
            new_feature = np.array(new_feature)
            if col == 0:
                ex_feature = new_feature
            else:
                ex_feature = np.append(ex_feature, new_feature, axis=1)

        num_features = ex_feature.shape[1] // 3
        num_axis = var_accel_x.get() + var_accel_y.get() + var_accel_z.get()
        trn_feature = np.empty([ex_feature.shape[0], num_axis * num_features])
        k = 0
        if var_accel_x.get() == 1:
            trn_feature[:, k * num_features:(k + 1) * num_features] = ex_feature[:, 0:num_features]
            k += 1
        if var_accel_y.get() == 1:
            trn_feature[:, k * num_features:(k + 1) * num_features] = ex_feature[:, num_features:num_features * 2]
            k += 1
        if var_accel_z.get() == 1:
            trn_feature[:, k * num_features:(k + 1) * num_features] = ex_feature[:, num_features * 2:num_features * 3]
            k += 1

        final_features.extend(trn_feature)
        for i in range(len(trn_feature)):
            labels.append(label)

    final_features = np.array(final_features)
    labels = np.array(labels)

    # Derive the two output file names from the chosen base name.
    save_labels = save_features
    save_features = save_features.split(".")[0] + "_features.npy"
    save_labels = save_labels.split(".")[0] + "_labels.npy"
    np.save(save_features, final_features)
    np.save(save_labels, labels)
    messagebox.showinfo("Congratulations", "Features and labels are successfully saved!")

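# The arrays saved by extractAndSave() can be reloaded with plain numpy, e.g. to
# train a model outside of the GUI. Illustrative only; the file names below are
# placeholders for whatever base name was chosen in the save dialog.
#
#   import numpy as np
#   features = np.load("run1_features.npy")
#   labels = np.load("run1_labels.npy")
#   print(features.shape, labels.shape)
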
def start_predict_online(data_online):
    global online_window
    global online_window_size
    global flag

    if model_text2.get("1.0", END).strip() == "":
        messagebox.showerror("Error", "Please select test model!")
        return
    online_model = model_text2.get("1.0", END).split("\n")[0]

    # Clear any previous result before showing a new one.
    if res_text2.get("1.0", END).strip() != "":
        res_text2.delete("1.0", END)

    # Load the preprocessing/feature configuration saved next to the model.
    test_config = pd.read_csv(online_model.split(".")[0] + ".txt", sep=" ")
    test_config.columns = ['param', 'value']
    test_config = pd.DataFrame(data=[test_config['value'].values], columns=test_config['param'].values)
    if test_config['filter'].values[0] == '1':
        filter_order = int(test_config['order'].values[0])
        filter_fc = test_config['fc'].values[0]
        filter_type = test_config['type'].values[0]
        filter_fs = int(test_config['fs'].values[0])
        if filter_type == "bandpass" or filter_type == "bandstop":
            fc = filter_fc.split("[")[1].split("]")[0].split(",")
            filter_fc = [int(fc[0]), int(fc[1])]
        else:
            filter_fc = int(filter_fc)
    if test_config['smooth'].values[0] == '1':
        smooth_wl = int(test_config['windowlength'].values[0])
        smooth_po = int(test_config['polyorder'].values[0])
        smooth_mode = test_config['mode'].values[0]
    if test_config['eliminate'].values[0] == '1':
        elim_thre = int(test_config['threshold'].values[0])
        elim_ws = int(test_config['windowsize'].values[0])
        elim_base = int(test_config['baseon'].values[0])
    if test_config['energy'].values[0] == '1':
        eng_bs = int(test_config['bandsize'].values[0])
        eng_odr = int(test_config['odr'].values[0])
    seg_size = int(test_config['segsize'].values[0])

    # Start a new collection buffer on the first call and after every prediction.
    # Cast to int so the countdown below can actually reach zero.
    if not flag:
        online_window = []
        online_window_size = int(seg_size * 1.5)
        flag = True

    if online_window_size == 0:
        data = np.array(online_window)
        if test_config['filter'].values[0] == '1':
            data = data_preprocessing.filter(data, filter_order, filter_fc, filter_type, filter_fs)
        if test_config['smooth'].values[0] == '1':
            smoothed_data = data_preprocessing.smooth(data, smooth_wl, smooth_po, smooth_mode)
            if test_config['eliminate'].values[0] == '1':
                new_smoothed_data, data = data_preprocessing.eliminate_abnormal_value(
                    smoothed_data, data, elim_ws, elim_thre, elim_base)
            else:
                # keep the preprocessing consistent with the offline path
                data = smoothed_data

        if data.shape[0] >= seg_size:
            num = (int(test_config['mean'].values[0]) + int(test_config['std'].values[0])
                   + int(test_config['min'].values[0]) + int(test_config['max'].values[0])
                   + int(test_config['rms'].values[0]))
            for col in range(3):
                feature = np.empty([int(len(data[:, col]) / seg_size), num])
                new_feature = []
                for i in range(int(len(data[:, col]) / seg_size)):
                    window = data[i * seg_size:(i + 1) * seg_size, col]
                    k = 0
                    if int(test_config['mean'].values[0]):
                        feature[i, k] = feature_extraction.mean(window)
                        k += 1
                    if int(test_config['std'].values[0]):
                        feature[i, k] = feature_extraction.std(window)
                        k += 1
                    if int(test_config['min'].values[0]):
                        feature[i, k] = feature_extraction.getmin(window)
                        k += 1
                    if int(test_config['max'].values[0]):
                        feature[i, k] = feature_extraction.getmax(window)
                        k += 1
                    if int(test_config['rms'].values[0]):
                        feature[i, k] = feature_extraction.rms(window)
                        k += 1
                    if int(test_config['energy'].values[0]):
                        energy_vector = feature_extraction.energy_for_each_freq_band(window, eng_odr, eng_bs)
                        temp = np.append(feature[i, :], energy_vector)
                        new_feature.append(temp)
                    else:
                        new_feature.append(feature[i, :])
                new_feature = np.array(new_feature)
                if col == 0:
                    ex_feature = new_feature
                else:
                    ex_feature = np.append(ex_feature, new_feature, axis=1)

            num_features = ex_feature.shape[1] // 3
            num_axis = (int(test_config['X'].values[0]) + int(test_config['Y'].values[0])
                        + int(test_config['Z'].values[0]))
            trn_feature = np.empty([ex_feature.shape[0], num_axis * num_features])
            k = 0
            if int(test_config['X'].values[0]) == 1:
                trn_feature[:, k * num_features:(k + 1) * num_features] = ex_feature[:, 0:num_features]
                k += 1
            if int(test_config['Y'].values[0]) == 1:
                trn_feature[:, k * num_features:(k + 1) * num_features] = ex_feature[:, num_features:num_features * 2]
                k += 1
            if int(test_config['Z'].values[0]) == 1:
                trn_feature[:, k * num_features:(k + 1) * num_features] = ex_feature[:, num_features * 2:num_features * 3]
                k += 1

            with open(online_model, 'rb') as model_file:
                test_model = pickle.load(model_file)
            predict_output = test_model.predict(trn_feature)
            print(predict_output[0])
            # res_text2.insert(END, predict_output[0])
        # Reset so that the next call starts a fresh collection buffer.
        flag = False
    else:
        online_window.append(data_online)
        online_window_size -= 1
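
# start_predict_online() is intended to be fed one accelerometer sample at a
# time: each call appends `data_online` (an [ax, ay, az] triple) to the global
# `online_window` buffer until 1.5 * segsize samples have accumulated, then
# preprocesses the whole buffer and predicts on it. Illustrative driver only;
# the sample source below is a placeholder, not part of this module.
#
#   for sample in incoming_samples:   # e.g. rows parsed from the serial stream
#       start_predict_online([sample['ax'], sample['ay'], sample['az']])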