def _preprocess_sensor_data(item, all_items, **kwargs):
    """Build the delayed preprocessing pipeline for one sensor-data item.

    Lazily loads the raw sensor file, applies the per-file time offset,
    corrects axis orientation (flip/swap), saves the corrected result to
    file, and bundles it with the item's session metadata.

    Args:
        item: GroupBy item holding the data reference and session metas.
        all_items: all items in the group (unused; kept for the GroupBy
            callback interface).
        **kwargs: must contain 'dataset_name', forwarded to save_to_file.

    Returns:
        A GroupBy bundle of the delayed corrected data plus the metas.

    Raises:
        KeyError: if 'dataset_name' is not in kwargs.
    """
    # get session boundaries
    metas = GroupBy.get_meta(item)
    # hoist the repeated lookup: the same data reference feeds the loader,
    # the offset lookup, and the orientation-correction lookup
    data_ref = GroupBy.get_data(item)
    # load data
    loaded_data = delayed(fileio.load_sensor)(data_ref)
    # apply offset mapping
    get_offset = partial(dataset.get_offset, offset_column=1)
    offset_in_secs = delayed(get_offset)(data_ref)
    offset_data = delayed(dataframe.offset)(loaded_data, offset_in_secs)
    # apply orientation corrections
    orientation_correction = delayed(dataset.get_orientation_correction)(
        data_ref)
    flip_and_swap = apply_on_accelerometer_dataframe(orientation.flip_and_swap)
    corrected_data = delayed(flip_and_swap)(
        offset_data,
        x_flip=orientation_correction[0],
        y_flip=orientation_correction[1],
        z_flip=orientation_correction[2])
    # KeyError here is intentional — the caller must supply dataset_name
    dataset_name = kwargs['dataset_name']
    corrected_data = delayed(save_to_file)(corrected_data, metas, dataset_name)
    return GroupBy.bundle(corrected_data, **metas)
def preprocess_annotations(item, all_items, **kwargs):
    """Lazily load an annotation file and bundle it with its session metas.

    Args:
        item: GroupBy item holding the annotation file reference and metas.
        all_items: all items in the group (unused; callback interface).
        **kwargs: unused; accepted for callback-signature compatibility.

    Returns:
        A GroupBy bundle of the delayed annotation data plus the metas.
    """
    session_metas = GroupBy.get_meta(item)
    lazy_annotations = delayed(fileio.load_annotation)(GroupBy.get_data(item))
    return GroupBy.bundle(lazy_annotations, **session_metas)
def load_data(item, all_items, **kwargs):
    """Lazily load a sensor file and bundle it with its session metas.

    Args:
        item: GroupBy item holding the sensor file reference and metas.
        all_items: all items in the group (unused; callback interface).
        **kwargs: unused; accepted for callback-signature compatibility.

    Returns:
        A GroupBy bundle of the delayed sensor data plus the metas.
    """
    session_metas = GroupBy.get_meta(item)
    lazy_sensor_df = delayed(fileio.load_sensor)(GroupBy.get_data(item))
    return GroupBy.bundle(lazy_sensor_df, **session_metas)
def load_data(item, all_items, *, old_sr, new_sr, **kwargs):
    """Lazily load a sensor file, resampling from old_sr to new_sr if needed.

    Args:
        item: GroupBy item holding the sensor file reference and metas.
        all_items: all items in the group (unused; callback interface).
        old_sr: sampling rate of the stored raw data.
        new_sr: target sampling rate; when equal to old_sr no resampling
            is performed.
        **kwargs: unused; accepted for callback-signature compatibility.

    Returns:
        A GroupBy bundle of the (possibly resampled) delayed data plus metas.
    """
    session_metas = GroupBy.get_meta(item)
    lazy_sensor_df = delayed(fileio.load_sensor)(GroupBy.get_data(item))
    # guard form: only resample when the rates actually differ
    if old_sr != new_sr:
        print('resampling raw data...from {} to {}'.format(old_sr, new_sr))
        lazy_sensor_df = resample_data(lazy_sensor_df, old_sr=old_sr,
                                       new_sr=new_sr)
    return GroupBy.bundle(lazy_sensor_df, **session_metas)
def load_data(data, all_data, **kwargs):
    """Lazily load a sensor file and bundle it with its session metas.

    Args:
        data: GroupBy item holding the sensor file reference and metas.
        all_data: all items in the group (unused; callback interface).
        **kwargs: unused; accepted for callback-signature compatibility.

    Returns:
        A GroupBy bundle of the delayed sensor data plus the metas.
    """
    session_metas = GroupBy.get_meta(data)
    lazy_loader = delayed(fileio.load_sensor)
    lazy_sensor_df = lazy_loader(GroupBy.get_data(data))
    return GroupBy.bundle(lazy_sensor_df, **session_metas)
def load_data(item, all_items, **kwargs):
    """Lazily load a sensor file and bundle it with its session metas.

    Args:
        item: GroupBy item holding the sensor file reference and metas.
        all_items: all items in the group (unused; callback interface).
        **kwargs: ignored; accepted for signature consistency with the
            other GroupBy callbacks in this file, so a shared dispatcher
            passing keyword arguments does not raise TypeError.

    Returns:
        A GroupBy bundle of the delayed sensor data plus the metas.
    """
    metas = GroupBy.get_meta(item)
    data_loader = delayed(fileio.load_sensor)
    return GroupBy.bundle(data_loader(GroupBy.get_data(item)), **metas)