# Segmentation
for n in range(nSignals):
    segmented_emg.append(fex.segmentation(emg[n]))

# Feature calculation
feature_list = [fex.mav, fex.rms, fex.var, fex.ssi, fex.zc, fex.wl, fex.ssc, fex.wamp]
nSegments = len(segmented_emg[0][0])
nFeatures = len(feature_list)
feature_matrix = np.zeros((nGestures*nIterations*nSegments, nFeatures*nChannels))
n = 0
for i in range(0, nSignals, nChannels):
    for j in range(nSegments):
        feature_matrix[n] = fex.features((segmented_emg[i][:, j], segmented_emg[i+1][:, j]), feature_list)
        n = n + 1

# Target matrix generation
y = fex.gestures(nIterations*nSegments, nGestures)

# Dimensionality reduction and feature scaling
[X, reductor, scaler] = fex.feature_scaling(feature_matrix, y)

# Split dataset into training and testing datasets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.4, random_state=42)

# Classifier training
classifier = SVC(kernel='rbf', C=10, gamma=10)
classifier.fit(X_train, y_train)
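# A minimal evaluation sketch, assuming classifier, X_test and y_test from the
# pipeline above; it is not part of the original block and uses only standard
# scikit-learn metrics to check the fitted SVC on the held-out split.
from sklearn.metrics import accuracy_score, confusion_matrix

y_pred = classifier.predict(X_test)
print("Test accuracy:", accuracy_score(y_test, y_pred))
print("Confusion matrix:\n", confusion_matrix(y_test, y_pred))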
emg.append(emg_data['motion'+str(m)+'_ch'+str(c)][:, i])  # motion1_ch1_i1, motion1_ch2_i1, motion1_ch1_i2, motion1_ch2_i2

for n in range(len(emg)):
    emg_seg.append(fex.segmentation(emg[n]))

feature_list = [fex.mav, fex.rms, fex.var, fex.ssi, fex.zc, fex.wl, fex.ssc, fex.wamp]
nSegments = len(emg_seg[0][0])
nFeatures = len(feature_list)
feature_matrix = np.zeros((nSegments*nIterations*nMotions, nFeatures*nChannels))
n = 0
for i in range(0, len(emg_seg), nChannels):
    for j in range(nSegments):
        feature_matrix[n] = fex.features((emg_seg[i][:, j], emg_seg[i+1][:, j]), feature_list)
        n += 1

from sklearn.svm import SVC
import timeit

target = fex.gestures(nIterations*nSegments, nMotions)
#target = np.concatenate((0*np.ones((128,1)), 1*np.ones((128,1)), 2*np.ones((128,1)), 3*np.ones((128,1)), 4*np.ones((128,1))), axis=0).ravel()

tic = timeit.default_timer()
[feat_scaled, reductor, scaler] = fex.feature_scaling(feature_matrix, target)
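# A minimal completion sketch, assuming the timer started above is meant to
# measure fex.feature_scaling; the second reading of timeit.default_timer()
# is not shown in the original fragment.
toc = timeit.default_timer()
print("feature_scaling took {:.4f} s".format(toc - tic))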
feature_list = [fex.mav, fex.rms, fex.var, fex.ssi, fex.zc, fex.wl, fex.ssc, fex.wamp]

# Use the smallest segment count across signals so every signal contributes
# the same number of segments to the feature matrix
n_segments = len(segmented_emg[0][0])
for i in range(0, n_signals, n_channels):
    if len(segmented_emg[i][0]) < n_segments:
        n_segments = len(segmented_emg[i][0])

n_features = len(feature_list)
feature_matrix = np.zeros((n_classes*n_iterations*n_segments, n_features*n_channels))
n = 0
for i in range(0, n_signals, n_channels):
    for j in range(n_segments):
        feature_matrix[n] = fex.features((segmented_emg[i][:, j], segmented_emg[i+1][:, j],
                                          segmented_emg[i+2][:, j], segmented_emg[i+3][:, j],
                                          segmented_emg[i+4][:, j], segmented_emg[i+5][:, j],
                                          segmented_emg[i+6][:, j], segmented_emg[i+7][:, j]), feature_list)
        n = n + 1

# Target matrix generation
y = fex.generate_target(n_iterations*n_segments, class_labels)

# Dimensionality reduction and feature scaling
[X, reductor, scaler] = fex.feature_scaling(feature_matrix, y)

# Split dataset into training and testing datasets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.4, random_state=42)

# Classifier training
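# A minimal sketch of the "Classifier training" step left open above, reusing
# the SVC settings from the two-channel pipeline earlier in this document.
# Persisting the fitted objects with joblib is an assumption (not in the
# original code); it is one way to hand classifier/reductor/scaler to the
# real-time callback below. The filename is hypothetical.
from sklearn.svm import SVC
import joblib

classifier = SVC(kernel='rbf', C=10, gamma=10)
classifier.fit(X_train, y_train)
joblib.dump((classifier, reductor, scaler), 'emg_model.joblib')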
def process_emg(data):
    global count, emg, classifier, class_labels

    if count < 4:
        # Accumulate incoming packets (two channel arrays per packet) until
        # enough data has been collected
        count += 1
        emg.append(np.array(data[0]))
        emg.append(np.array(data[1]))
    else:
        count = 0
        n_classes = 1
        n_iterations = 1
        n_channels = 8
        #n_signals = 8
        n_signals = n_classes * n_iterations * n_channels
        segmented_emg = list()
        print("EMG: ")
        print(emg)

        # Segmentation
        for n in range(n_signals):
            segmented_emg.append(fex.segmentation(emg[n], n_samples=1))

        # Feature calculation
        feature_list = [fex.mav, fex.rms, fex.var, fex.ssi, fex.zc, fex.wl, fex.ssc, fex.wamp]
        n_segments = len(segmented_emg[0][0])
        print("\nN_Segments: " + str(n_segments) + "\n")
        n_features = len(feature_list)
        feature_matrix = np.zeros((n_classes * n_iterations * n_segments, n_features * n_channels))
        n = 0
        print("Feature_Matrix_SHAPE: ")
        print(feature_matrix.shape)
        print("")
        print("Segmented_EMG: ")
        print(segmented_emg)
        print("")
        for i in range(0, n_classes):
            for j in range(n_segments):
                #print("i: " + str(i))
                #print("j: " + str(j))
                feature_matrix[n] = fex.features((segmented_emg[i][:, j], segmented_emg[i+1][:, j],
                                                  segmented_emg[i+2][:, j], segmented_emg[i+3][:, j],
                                                  segmented_emg[i+4][:, j], segmented_emg[i+5][:, j],
                                                  segmented_emg[i+6][:, j], segmented_emg[i+7][:, j]), feature_list)
                n = n + 1

        # Target matrix generation
        print(feature_matrix)
        print(feature_matrix.shape)
        y = fex.generate_target(n_iterations * n_segments, class_labels)
        print(y)
        print(y.shape)

        # Dimensionality reduction and feature scaling
        [X, reductor, scaler] = fex.feature_scaling(feature_matrix, y)

        # Classification
        print(X)
        print(X.shape)
        # Note: prediction is run on the raw feature_matrix, not on the
        # reduced/scaled X computed above
        predict = classifier.predict(feature_matrix)
        print(predict)
        emg = list()
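# Hypothetical wiring for the callback above (none of this is in the original
# code): restore the objects persisted after training and initialise the
# globals that process_emg relies on before packets start arriving. The
# filename matches the hypothetical joblib.dump above.
import joblib

classifier, reductor, scaler = joblib.load('emg_model.joblib')
count = 0
emg = list()
class_labels = [0]  # one label per expected class; n_classes is 1 inside process_emg

# process_emg is then registered as the per-packet EMG callback of whatever
# streaming API is in use, so it is invoked once for each incoming packet.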