def load_isolated_dataset(self):
    """Reconstruct the right-lower-arm trajectory from quaternion data and
    return the encoded gesture templates.

    Returns:
        tuple: (encoded_templates, templates_labels) containing only the
        segments whose label is positive.
    """
    data = self.load_data()
    labels = data[:, -1]

    # Body-model segment vectors (torso points opposite the limbs).
    torso_vec = np.array([-6, 0, 0])
    limb_vec = np.array([3, 0, 0])

    # Orientation quaternions for torso, right upper arm, right lower arm.
    q_torso = [Quaternion(q) for q in data[:, 46:50]]
    q_rua = [Quaternion(q) for q in data[:, 59:63]]
    q_rla = [Quaternion(q) for q in data[:, 72:76]]

    # Chain the rotated segment vectors: torso -> upper arm -> lower arm.
    torso_pts = [q.rotate(torso_vec) for q in q_torso]
    rua_pts = [q.rotate(limb_vec) + t for q, t in zip(q_rua, torso_pts)]
    rla_pts = np.array(
        [q.rotate(limb_vec) + u for q, u in zip(q_rla, rua_pts)])
    rla_pts = np.nan_to_num(rla_pts)

    # Smooth every spatial axis (2.5 Hz cutoff at a 30 Hz sampling rate).
    for axis in range(3):
        rla_pts[:, axis] = butter_lowpass_filter(rla_pts[:, axis], 2.5, 30)

    templates, seg_labels = Dataset.segment_data(rla_pts, labels)

    # Keep only gesture (label > 0) segments.
    keep = np.where(seg_labels > 0)[0]
    kept_templates = [templates[i] for i in keep]
    seg_labels = seg_labels[keep]

    encoded_templates = list()
    encoded_templates += self.encode_isolated_trajectories(kept_templates)
    templates_labels = np.append(np.array([]), seg_labels)
    return encoded_templates, templates_labels
def load_encode_dataset(self, use_torso=False):
    """Load every recording of the default subject, reconstruct the right
    hand trajectory from quaternion data, and return encoded templates.

    Args:
        use_torso: when True, anchor the arm chain on the rotated torso
            vector instead of starting at the upper arm.

    Returns:
        tuple: (encoded_templates, templates_labels) for gesture
        (label > 0) segments only.
    """
    pattern = join(self.dataset_path,
                   "subject{}_*.txt".format(self.__default_user))
    encoded_templates = list()
    templates_labels = np.array([])

    # Body-model segment vectors for torso, arm segments and hand.
    v_torso = np.array([-6, 0, 0])
    v_limb = np.array([3, 0, 0])
    v_hand = np.array([1, 0, 0])

    for path in [f for f in glob.glob(pattern)]:
        data = np.loadtxt(path)
        labels = data[:, -1]

        # Orientation quaternions: torso, upper arm, lower arm, hand.
        q_torso = [Quaternion(q) for q in data[:, 13:17]]
        q_rua = [Quaternion(q) for q in data[:, 29:33]]
        q_rla = [Quaternion(q) for q in data[:, 45:49]]
        q_rha = [Quaternion(q) for q in data[:, 61:65]]

        if use_torso:
            torso_pts = [q.rotate(v_torso) for q in q_torso]
            rua_pts = [q.rotate(v_limb) + t
                       for q, t in zip(q_rua, torso_pts)]
        else:
            rua_pts = [q.rotate(v_limb) for q in q_rua]

        # Chain upper arm -> lower arm -> hand.
        rla_pts = [q.rotate(v_limb) + u for q, u in zip(q_rla, rua_pts)]
        rha_pts = np.array(
            [q.rotate(v_hand) + l for q, l in zip(q_rha, rla_pts)])

        # Smooth every axis (2.5 Hz cutoff at a 30 Hz sampling rate).
        for axis in range(3):
            rha_pts[:, axis] = butter_lowpass_filter(
                rha_pts[:, axis], 2.5, 30)

        templates, seg_labels = Dataset.segment_data(rha_pts, labels)

        # Keep only gesture (label > 0) segments.
        keep = np.where(seg_labels > 0)[0]
        encoded_templates += self.encode_isolated_trajectories(
            [templates[i] for i in keep])
        templates_labels = np.append(templates_labels, seg_labels[keep])

    return encoded_templates, templates_labels
def load_isolated_dataset(self, sensor_no=67):
    """Segment a single low-pass-filtered, quantized sensor channel.

    Args:
        sensor_no: column index of the sensor channel to use.

    Returns:
        The (templates, labels) pair produced by ``Dataset.segment_data``.
    """
    data = self.load_data()
    stream_labels = data[:, 249]
    # Replace NaNs, then smooth with a 3 Hz low-pass at the dataset rate.
    signal = butter_lowpass_filter(
        np.nan_to_num(data[:, sensor_no]), 3, self.frequency)
    # Quantize into 128 levels spanning [-10000, 10000].
    levels = np.linspace(-10000, 10000, 128)
    quantized = np.digitize(signal, levels[:-1])
    return Dataset.segment_data(quantized, stream_labels)
def load_isolated_dataset(self):
    """Reconstruct right-hand orientation trajectories per user recording
    and return encoded gesture templates with their labels.

    Bug fix: the loop iterated over ``datapaths[1:2]`` but always opened
    ``datapaths[3]``, ignoring the loop variable; it now opens ``d``.

    Returns:
        tuple: (encoded_templates, templates_labels) for label > 0
        segments only.
    """
    encoded_templates = list()
    templates_labels = np.array([])
    datapaths = sorted(
        glob.glob(self.dataset_path + "./**/user*/labelled_data.txt",
                  recursive=True))
    # NOTE(review): only a one-file subset is processed here; widen the
    # slice to cover the full dataset if that was not intentional.
    for d in datapaths[1:2]:
        # was: open(datapaths[3]) — hard-coded index instead of the loop var
        with open(d) as f:
            data = np.array([line.strip().split() for line in f], float)
        labels = data[:, -1]

        # Truncate after the last label transition whose difference is
        # -1004 (presumably the end of the final useful gesture — confirm).
        tmp_labels = labels[:-1] - labels[1:]
        idx_last_useful_label = np.where(tmp_labels == -1004)[0][-1]
        data = data[:idx_last_useful_label]
        labels = labels[:idx_last_useful_label]

        # Body-model segment vectors.
        v_torso = np.array([0, -6, 0])
        v_limbs = np.array([0, 3, 0])
        v_hand = np.array([0, 1, 0])

        torso_quat = [Quaternion(q) for q in data[:, 10:14]]
        rua_quat = [Quaternion(q) for q in data[:, 23:27]]
        rla_quat = [Quaternion(q) for q in data[:, 36:40]]
        rha_quat = [Quaternion(q) for q in data[:, 49:53]]

        # NOTE(review): the torso/arm chain below is computed but never
        # used — rha_vectors is the bare rotated hand vector, unlike the
        # other loaders which add the arm chain. Confirm intent.
        torso_vectors = [q.rotate(v_torso) for q in torso_quat]
        rua_vectors = [rua_quat[i].rotate(v_limbs) + torso_vectors[i]
                       for i in range(len(rua_quat))]
        rla_vectors = np.array([rla_quat[i].rotate(v_limbs) + rua_vectors[i]
                                for i in range(len(rla_quat))])
        rha_vectors = np.array(
            [rha_quat[i].rotate(v_hand) for i in range(len(rha_quat))])

        # Smooth every axis (10 Hz cutoff at 500 Hz sampling).
        for axis in range(3):
            rha_vectors[:, axis] = butter_lowpass_filter(
                rha_vectors[:, axis], 10, 500)

        templates, tmp_templates_labels = Dataset.segment_data(
            rha_vectors, labels)

        # Keep only gesture (label > 0) segments.
        keep = np.where(tmp_templates_labels > 0)[0]
        encoded_templates += self.encode_isolated_trajectories(
            [templates[t] for t in keep])
        templates_labels = np.append(templates_labels,
                                     tmp_templates_labels[keep])
    return encoded_templates, templates_labels
def plot_isolate_gestures(sensor_no=67, annotation_column=249):
    """Plot every gesture segment of one Opportunity sensor channel.

    Args:
        sensor_no: column index of the sensor stream to plot.
        annotation_column: column index holding the gesture annotations.
    """
    dataset = OpportunityDataset()
    data = dataset.load_data()
    segments, seg_labels = Dataset.segment_data(
        data[:, sensor_no], data[:, annotation_column])
    seg_labels = np.array(seg_labels)
    # Only the gesture (label > 0) segments are plotted.
    keep = np.where(seg_labels > 0)[0]
    plot_creator.plot_gestures(
        [segments[i] for i in keep], seg_labels[keep])
    plt.show()
def plot_isolate_gestures():
    """Plot every gesture segment of the Wetlab accelerometer magnitude.

    Cleanup: removed the unused ``timestamps`` local and the dead
    commented-out plotting code that referenced it.
    """
    dataset = WetlabDataset()
    data = dataset.load_data()
    stream_labels = data[:, -1]
    # Magnitude of the 3-axis accelerometer (columns 1-3).
    magnitude = np.sqrt(
        data[:, 1] ** 2 + data[:, 2] ** 2 + data[:, 3] ** 2)
    templates, labels = Dataset.segment_data(magnitude, stream_labels)
    labels = np.array(labels)
    # Only the gesture (label > 0) segments are plotted.
    keep = np.where(labels > 0)[0]
    plot_creator.plot_gestures([templates[i] for i in keep], labels[keep])
    plt.show()
def load_isolated_dataset(self, load_encoded=True):
    """Return isolated gesture segments, either pre-encoded or raw.

    Args:
        load_encoded: when True delegate to the encoded loader; otherwise
            segment the filtered 2-D table data of the first recording.

    Returns:
        The encoded dataset, or the (templates, labels) pair produced by
        ``Dataset.segment_data``.
    """
    if load_encoded:
        return self.load_encoded_isolated_dataset()

    paths = sorted(glob.glob(self.dataset_path + "*-table*-data.txt"))
    data = np.loadtxt(paths[0])

    # The two table coordinates sit in the 4th/3rd-to-last columns.
    coords = np.nan_to_num(data[:, -4:-2])
    # Smooth both coordinates (5 Hz cutoff at 200 Hz sampling).
    for axis in (0, 1):
        coords[:, axis] = butter_lowpass_filter(coords[:, axis], 5, 200)

    # Negative annotations are mapped to 0 ("no gesture").
    labels = data[:, -2]
    labels[np.where(labels < 0)[0]] = 0
    return Dataset.segment_data(coords, labels)
def load_encoded_isolated_dataset(self):
    """Load the first table recording, segment its 2-D trajectories, and
    return the encoded A-Z letter gestures with their labels.

    Returns:
        tuple: (encoded_streams, streams_labels).
    """
    paths = sorted(glob.glob(self.dataset_path + "*-table*-data.txt"))
    encoded_streams = list()
    streams_labels = np.array([])
    for path in paths[:1]:
        with open(path) as input_file:
            data = np.array(
                [line.strip().split() for line in input_file], float)

        coords = np.nan_to_num(data[:, -4:-2])
        # Flip the sign of the y coordinate; x is kept as-is.
        rotated = np.zeros_like(coords)
        rotated[:, 0] = coords[:, 0]
        rotated[:, 1] = -coords[:, 1]

        labels = data[:, -2]
        size_labels = data[:, -1]  # gesture-size annotation (unused here)

        templates, template_labels = Dataset.segment_data(rotated, labels)

        # Discard segments too short (<= 5 samples) to be gestures.
        lengths = np.array([len(t) for t in templates])
        long_enough = np.where(lengths > 5)[0]
        templates = [templates[i] for i in long_enough]
        template_labels = template_labels[long_enough]

        # Keep only the A-Z letter gestures (labels 9..34).
        letters = np.where(
            (template_labels >= 9) & (template_labels < 35))[0]
        templates = [templates[i] for i in letters]
        streams_labels = np.append(streams_labels,
                                   template_labels[letters])
        encoded_streams += self.encode_trajectory(templates)
    return encoded_streams, streams_labels
def load_isolated_dataset(self):
    """Load accelerometer recordings, decimate and quantize the magnitude
    signal, and return the gesture (label > 0) segments.

    Fixes: removed leftover debug ``print`` calls; the 5x-decimated
    signal is now truncated with a decimated-scale length (previously the
    undecimated index ``idx_last_useful_label`` was applied to the
    decimated signal, so data and labels could end up misaligned).

    Returns:
        tuple: (templates, labels).
    """
    templates = []
    labels = []
    datapaths = sorted(
        glob.glob(self.dataset_path + "./**/user*/labelled_data.txt",
                  recursive=True))
    # NOTE(review): only a subset of the recordings is processed here.
    for d in datapaths[3:5]:
        with open(d) as f:
            data = np.array([line.strip().split() for line in f], float)
        tmp_labels = data[:, -1]

        # Low-pass each accelerometer axis (10 Hz @ 500 Hz), then
        # decimate by a factor of 5.
        acc_data = data[:, 43:46]
        acc_x = decimate_signal(
            butter_lowpass_filter(acc_data[:, 0], 10, 500), 5)
        acc_y = decimate_signal(
            butter_lowpass_filter(acc_data[:, 1], 10, 500), 5)
        acc_z = decimate_signal(
            butter_lowpass_filter(acc_data[:, 2], 10, 500), 5)
        acc_data = np.array([acc_x, acc_y, acc_z]).T

        # Truncate after the last label transition of -1004 (presumably
        # the end of the final useful gesture — confirm).
        tmp_labels_idx = tmp_labels[:-1] - tmp_labels[1:]
        idx_last_useful_label = np.where(tmp_labels_idx == -1004)[0][-1]
        # Labels are decimated by plain step-5 slicing; truncate the
        # decimated signal to the same sample count to keep alignment.
        useful_labels = tmp_labels[:idx_last_useful_label:5]
        useful_data = acc_data[:len(useful_labels)]

        # Quantize the magnitude into 128 bins over [0, 42000].
        norm_data = np.linalg.norm(useful_data, axis=1)
        bins = np.linspace(0, 42000, 128)
        quantized_data = np.digitize(norm_data, bins[:-1])

        tmp_templates, tmp_templates_labels = Dataset.segment_data(
            quantized_data, useful_labels)
        keep = np.where(tmp_templates_labels > 0)[0]
        templates += [tmp_templates[t] for t in keep]
        labels = np.append(labels, tmp_templates_labels[keep])
    return templates, labels
def load_isolated_dataset(self, sensor_no=51):
    """Quantize the decimated accelerometer magnitude into 64 levels and
    segment it by the annotation stream.

    Bug fix: magnitudes at or above ``max_value`` made ``np.digitize``
    return index 64, which crashed the 64-entry identity lookup table
    (``bins[x]``) with an IndexError. The lookup was the identity for
    indices 0..63, so it is replaced by a saturating clip — in-range
    behavior is unchanged.

    Args:
        sensor_no: column index of the accelerometer x axis (y and z are
            read from the next two columns).

    Returns:
        The (templates, labels) pair from ``Dataset.segment_data``.
    """
    data = self.load_data()
    stream_labels = data[:, 0]

    # Low-pass (5 Hz cutoff at the dataset frequency), decimate by 10.
    acc_x = decimate_signal(
        butter_lowpass_filter(data[:, sensor_no], 5, self.frequency), 10)
    acc_y = decimate_signal(
        butter_lowpass_filter(data[:, sensor_no + 1], 5, self.frequency),
        10)
    acc_z = decimate_signal(
        butter_lowpass_filter(data[:, sensor_no + 2], 5, self.frequency),
        10)

    # Decimate the labels the same way; labels above 53 are zeroed
    # (presumably out-of-vocabulary annotations — confirm).
    stream_labels = stream_labels[::10]
    stream_labels[np.where(stream_labels > 53)[0]] = 0

    acc_data = np.array([acc_x, acc_y, acc_z]).T
    magnitude = np.linalg.norm(acc_data, axis=1)

    # Quantize into 64 levels over [0, 3500); out-of-range values
    # saturate at the top level instead of raising IndexError.
    max_value = 3500
    min_value = 0
    bins = np.arange(min_value, max_value, (max_value - min_value) / 64)
    quantized = np.digitize(magnitude, bins)
    processed_data = np.minimum(quantized, 63).astype(int)
    return Dataset.segment_data(processed_data, stream_labels)
def load_isolated_dataset(self, sensor_no=1):
    """Segment one sensor column of the loaded data by its label column.

    Bug fix: ``data[:, sensor_no]`` applied NumPy tuple indexing to what
    the previous line treats as a pandas DataFrame
    (``data.iloc[:, -1].values``); tuple indexing raises on a DataFrame,
    so the sensor column is now read via ``.iloc`` as well.
    NOTE(review): assumes ``self.load_data()`` returns a DataFrame —
    confirm against the loader.

    Args:
        sensor_no: positional index of the sensor column to segment.

    Returns:
        The (templates, labels) pair from ``Dataset.segment_data``.
    """
    data = self.load_data()
    stream_labels = data.iloc[:, -1].values
    tmp_data = data.iloc[:, sensor_no].values
    return Dataset.segment_data(tmp_data, stream_labels)