def __init__(self, seq_type, root_dir, data_list, cache_path=None, step_size=100, window_size=400,
             random_shift=0, transform=None, **kwargs):
    """Build a sequence-to-sequence dataset over cached IMU sequences.

    Loads features/targets/aux data for every sequence in ``data_list``,
    optionally smooths them, and builds ``index_map`` of (sequence, frame)
    window anchors, skipping windows containing outlier velocities.

    Args:
        seq_type: sequence class exposing feature_dim / target_dim / aux_dim.
        root_dir: directory containing the per-sequence data folders.
        data_list: list of sequence folder names under ``root_dir``.
        cache_path: optional cache location passed to load_cached_sequences.
        step_size: stride (frames) between consecutive window anchors.
        window_size: number of frames per window.
        random_shift: maximum random offset allowed when sampling a window.
        transform: optional callable applied to samples at access time.
        **kwargs: extra options — 'feature_sigma' / 'target_sigma' (Gaussian
            smoothing along time, disabled when <= 0), 'max_velocity_norm'
            (outlier threshold, default 3.0), 'shuffle' (default True).
    """
    super(SequenceToSequenceDataset, self).__init__()
    self.seq_type = seq_type
    self.feature_dim = seq_type.feature_dim
    self.target_dim = seq_type.target_dim
    self.aux_dim = seq_type.aux_dim
    self.window_size = window_size
    self.step_size = step_size
    self.random_shift = random_shift
    self.transform = transform

    self.data_path = [osp.join(root_dir, data) for data in data_list]
    self.index_map = []

    self.features, self.targets, aux = load_cached_sequences(
        seq_type, root_dir, data_list, cache_path, **kwargs)

    # Optionally smooth the sequences along the time axis.
    # BUGFIX: the kwargs keys previously contained a stray trailing comma
    # ('feature_sigma,' / 'target_sigma,'), so the lookups always missed and
    # smoothing could never be enabled regardless of caller arguments.
    feat_sigma = kwargs.get('feature_sigma', -1)
    targ_sigma = kwargs.get('target_sigma', -1)
    if feat_sigma > 0:
        self.features = [gaussian_filter1d(feat, sigma=feat_sigma, axis=0) for feat in self.features]
    if targ_sigma > 0:
        self.targets = [gaussian_filter1d(targ, sigma=targ_sigma, axis=0) for targ in self.targets]

    max_norm = kwargs.get('max_velocity_norm', 3.0)
    self.ts, self.orientations, self.gt_pos, self.local_v = [], [], [], []
    for i in range(len(data_list)):
        # Features drop the final frame; targets are kept full length.
        # NOTE(review): `self.targets[i] = self.targets[i]` is a no-op kept
        # as-is — confirm whether targets were also meant to drop a frame.
        self.features[i] = self.features[i][:-1]
        self.targets[i] = self.targets[i]
        self.ts.append(aux[i][:-1, :1])
        self.orientations.append(aux[i][:-1, 1:5])
        self.gt_pos.append(aux[i][:-1, 5:8])
        # Per-frame ground-truth speed; windows touching outliers are skipped.
        velocity = np.linalg.norm(self.targets[i], axis=1)
        bad_data = velocity > max_norm
        for j in range(window_size + random_shift, self.targets[i].shape[0], step_size):
            if not bad_data[j - window_size - random_shift:j + random_shift].any():
                self.index_map.append([i, j])

    if kwargs.get('shuffle', True):
        random.shuffle(self.index_map)
def __init__(self, seq_type, root_dir, data_list, cache_path=None, step_size=10, window_size=200,
             random_shift=0, transform=None, **kwargs):
    """Build a dense windowed dataset over cached IMU sequences.

    Loads sequences at interval=1, optionally smooths them, and records every
    (sequence, frame) window anchor from ``window_size`` onward at ``step_size``
    stride in ``index_map``.

    Args:
        seq_type: sequence class exposing feature_dim / target_dim / aux_dim.
        root_dir: directory containing the per-sequence data folders.
        data_list: list of sequence folder names under ``root_dir``.
        cache_path: optional cache location passed to load_cached_sequences.
        step_size: stride (frames) between consecutive window anchors.
        window_size: number of frames per window.
        random_shift: maximum random offset allowed when sampling a window.
        transform: optional callable applied to samples at access time.
        **kwargs: extra options — 'feature_sigma' / 'target_sigma' (Gaussian
            smoothing along time, disabled when <= 0), 'shuffle' (default True).
    """
    super().__init__()
    self.feature_dim = seq_type.feature_dim
    self.target_dim = seq_type.target_dim
    self.aux_dim = seq_type.aux_dim
    self.window_size = window_size
    self.step_size = step_size
    self.random_shift = random_shift
    self.transform = transform

    self.data_path = [osp.join(root_dir, data) for data in data_list]
    self.index_map = []
    self.ts, self.orientations, self.gt_pos = [], [], []

    self.features, self.targets, aux = load_cached_sequences(
        seq_type, root_dir, data_list, cache_path, interval=1, **kwargs)

    # Optionally smooth the sequences along the time axis.
    # BUGFIX: the kwargs keys previously contained a stray trailing comma
    # ('feature_sigma,' / 'target_sigma,'), so the lookups always missed and
    # smoothing could never be enabled regardless of caller arguments.
    feat_sigma = kwargs.get('feature_sigma', -1)
    targ_sigma = kwargs.get('target_sigma', -1)
    if feat_sigma > 0:
        self.features = [gaussian_filter1d(feat, sigma=feat_sigma, axis=0) for feat in self.features]
    if targ_sigma > 0:
        self.targets = [gaussian_filter1d(targ, sigma=targ_sigma, axis=0) for targ in self.targets]

    for i in range(len(data_list)):
        # aux layout (per visible slicing): col 0 = timestamp, cols 1:5 =
        # orientation quaternion, last 3 cols = ground-truth position.
        self.ts.append(aux[i][:, 0])
        self.orientations.append(aux[i][:, 1:5])
        self.gt_pos.append(aux[i][:, -3:])
        self.index_map += [[i, j] for j in range(window_size, self.targets[i].shape[0], step_size)]

    if kwargs.get('shuffle', True):
        random.shuffle(self.index_map)
def __init__(self, seq_type, root_dir, data_list, cache_path=None, step_size=10, window_size=200,
             random_shift=0, transform=None, **kwargs):
    """Build a strided windowed dataset over cached IMU sequences.

    Sequences are loaded at a configurable sampling interval (kwarg
    'interval', defaulting to ``window_size``); every frame index from 0 at
    ``step_size`` stride becomes a (sequence, frame) entry in ``index_map``.

    Args:
        seq_type: sequence class exposing feature_dim / target_dim / aux_dim.
        root_dir: directory containing the per-sequence data folders.
        data_list: list of sequence folder names under ``root_dir``.
        cache_path: optional cache location passed to load_cached_sequences.
        step_size: stride (frames) between consecutive window anchors.
        window_size: number of frames per window.
        random_shift: maximum random offset allowed when sampling a window.
        transform: optional callable applied to samples at access time.
        **kwargs: extra options — 'interval' (loading interval, defaults to
            ``window_size``), 'shuffle' (default True).
    """
    super(StridedSequenceDataset, self).__init__()
    self.feature_dim = seq_type.feature_dim
    self.target_dim = seq_type.target_dim
    self.aux_dim = seq_type.aux_dim
    self.window_size = window_size
    self.step_size = step_size
    self.random_shift = random_shift
    self.transform = transform
    self.interval = kwargs.get('interval', window_size)

    self.data_path = [osp.join(root_dir, name) for name in data_list]
    self.index_map = []
    self.ts, self.orientations, self.gt_pos = [], [], []

    self.features, self.targets, aux = load_cached_sequences(
        seq_type, root_dir, data_list, cache_path, interval=self.interval, **kwargs)

    for seq_id in range(len(data_list)):
        seq_aux = aux[seq_id]
        # aux layout (per visible slicing): col 0 = timestamp, cols 1:5 =
        # orientation quaternion, last 3 cols = ground-truth position.
        self.ts.append(seq_aux[:, 0])
        self.orientations.append(seq_aux[:, 1:5])
        self.gt_pos.append(seq_aux[:, -3:])
        n_frames = self.targets[seq_id].shape[0]
        for frame in range(0, n_frames, step_size):
            self.index_map.append([seq_id, frame])

    if kwargs.get('shuffle', True):
        random.shuffle(self.index_map)