def get_predicted_features(self, pos_past, traj, height, orient_pred, quat_pred):
    num_samples = quat_pred.shape[0]
    num_frames = quat_pred.shape[1]
    orient_pred = orient_pred.view(num_samples, num_frames, 1, -1)
    quat_pred = quat_pred.view(num_samples, num_frames, self.V - 1, -1)
    quats_world = torch.cat((orient_pred, quat_pred), dim=-2)
    root_pred = torch.zeros((num_samples, num_frames, self.C)).cuda().float()
    root_pred[:, :, [0, 2]] = traj
    root_pred[:, :, 1] = height
    pos_pred = MocapDataset.forward_kinematics(
        quats_world, root_pred, self.joint_parents,
        torch.from_numpy(self.joint_offsets).float().cuda())
    affs_pred = torch.tensor(
        MocapDataset.get_affective_features(
            pos_pred.detach().cpu().numpy())).cuda().float()
    spline = []
    for s in range(num_samples):
        data_pred_curr = dict()
        data_pred_curr['positions'] = torch.cat(
            (pos_past[s], pos_pred[s]), dim=0).detach().cpu().numpy()
        data_pred_curr['trans_and_controls'] = \
            MocapDataset.compute_translations_and_controls(data_pred_curr)
        spline.append(
            Spline.extract_spline_features(
                MocapDataset.compute_splines(data_pred_curr))[0][-1:])
    spline_pred = torch.from_numpy(np.stack(spline, axis=0)).cuda().float()
    return pos_pred, affs_pred, spline_pred
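# A minimal, standalone sketch of the forward-kinematics step delegated to
# MocapDataset.forward_kinematics above: rotate each joint's rest offset by its
# rotation and add the parent joint's world position. It assumes the rotations are
# already per-joint world-space quaternions in (w, x, y, z) order (as the explicit
# loop in the second get_predicted_features variant below does) and that
# parents[j] < j; the project's own function may use different conventions.
def _fk_sketch(quats_world, root_pos, parents, offsets):
    # quats_world: (N, T, V, 4), root_pos: (N, T, 3), offsets: (V, 3), parents[0] == -1
    import torch

    def _qrot(q, v):
        # Rotate vectors v by quaternions q: v + 2 * (w * (q_vec x v) + q_vec x (q_vec x v)).
        qvec = q[..., 1:]
        uv = torch.cross(qvec, v, dim=-1)
        uuv = torch.cross(qvec, uv, dim=-1)
        return v + 2.0 * (q[..., :1] * uv + uuv)

    pos = [root_pos]
    for j in range(1, offsets.shape[0]):
        pos.append(_qrot(quats_world[:, :, j], offsets[j].expand_as(root_pos))
                   + pos[parents[j]])
    return torch.stack(pos, dim=2)  # (N, T, V, 3)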
def return_batch(self, batch_size, dataset):
    # batch_size is either a single-element list [N] (sample N sequences at random)
    # or an explicit list of dataset keys to use as the batch.
    if len(batch_size) > 1:
        rand_keys = np.copy(batch_size)
        batch_size = len(batch_size)
    else:
        batch_size = batch_size[0]
        rand_keys = None
    probs = []
    for k in dataset.keys():
        if 'spline' not in dataset[k]:
            raise KeyError('No splines found. Perhaps you forgot to compute them?')
        probs.append(dataset[k]['spline'].size())
    probs = np.array(probs) / np.sum(probs)
    if rand_keys is None:
        # Sample keys without replacement, weighted by spline size.
        rand_keys = np.random.choice(len(dataset), size=batch_size,
                                     replace=False, p=probs)

    batch_pos = np.zeros((batch_size, self.T, self.V, self.C), dtype='float32')
    batch_traj = np.zeros((batch_size, self.T, self.C), dtype='float32')
    batch_quat = np.zeros((batch_size, self.T, (self.V - 1) * self.D), dtype='float32')
    batch_orient = np.zeros((batch_size, self.T, self.O), dtype='float32')
    batch_affs = np.zeros((batch_size, self.T, self.A), dtype='float32')
    batch_spline = np.zeros((batch_size, self.T, self.S), dtype='float32')
    batch_phase_and_root_speed = np.zeros((batch_size, self.T, self.PRS), dtype='float32')
    batch_labels = np.zeros((batch_size, 1, self.num_labels[0]), dtype='float32')

    for i, k in enumerate(rand_keys):
        pos = dataset[str(k)]['positions_world']
        traj = dataset[str(k)]['trajectory']
        quat = dataset[str(k)]['rotations']
        orient = dataset[str(k)]['orientations']
        affs = dataset[str(k)]['affective_features']
        spline, phase = Spline.extract_spline_features(dataset[str(k)]['spline'])
        root_speed = dataset[str(k)]['trans_and_controls'][:, -1].reshape(-1, 1)
        labels = dataset[str(k)]['labels'][:self.num_labels[0]]

        batch_pos[i] = pos
        batch_traj[i] = traj
        batch_quat[i] = quat.reshape(self.T, -1)
        batch_orient[i] = orient.reshape(self.T, -1)
        batch_affs[i] = affs
        batch_spline[i] = spline
        batch_phase_and_root_speed[i] = np.concatenate((phase, root_speed), axis=-1)
        batch_labels[i] = np.expand_dims(labels, axis=0)

    return batch_pos, batch_traj, batch_quat, batch_orient, batch_affs, batch_spline, \
        batch_phase_and_root_speed, batch_labels
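# Standalone illustration (not part of the pipeline) of the weighted sampling used in
# return_batch: keys are drawn without replacement with probability proportional to
# each sequence's spline size, so longer trajectories are sampled more often.
# The sizes below are made-up toy values.
import numpy as np

sizes = np.array([12., 48., 30., 10.])             # e.g. dataset[k]['spline'].size()
probs = sizes / sizes.sum()
keys = np.random.choice(len(sizes), size=2, replace=False, p=probs)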
def return_batch(self, batch_size, dataset, randomized=True):
    # batch_size is either a single-element list [N] or an explicit list of dataset keys.
    if len(batch_size) > 1:
        rand_keys = np.copy(batch_size)
        batch_size = len(batch_size)
    else:
        batch_size = batch_size[0]
        rand_keys = None
    probs = []
    for k in dataset.keys():
        if 'spline' not in dataset[k]:
            raise KeyError('No splines found. Perhaps you forgot to compute them?')
        probs.append(dataset[k]['spline'].size())
    probs = np.array(probs) / np.sum(probs)
    if rand_keys is None:
        if randomized:
            # Sample keys without replacement, weighted by spline size.
            rand_keys = np.random.choice(len(dataset), size=batch_size,
                                         replace=False, p=probs)
        else:
            rand_keys = np.arange(batch_size)

    batch_pos = np.zeros((batch_size, self.T, self.V, self.C), dtype='float32')
    batch_quat = np.zeros((batch_size, self.T, (self.V - 1) * self.D), dtype='float32')
    batch_orient = np.zeros((batch_size, self.T, self.O), dtype='float32')
    batch_z_mean = np.zeros((batch_size, self.Z), dtype='float32')
    batch_z_dev = np.zeros((batch_size, self.T, self.Z), dtype='float32')
    batch_root_speed = np.zeros((batch_size, self.T, self.RS), dtype='float32')
    batch_affs = np.zeros((batch_size, self.T, self.A), dtype='float32')
    batch_spline = np.zeros((batch_size, self.T, self.S), dtype='float32')
    batch_labels = np.zeros((batch_size, 1, self.num_labels[0]), dtype='float32')

    for i, k in enumerate(rand_keys):
        pos = dataset[str(k)]['positions'][:self.T]
        quat = dataset[str(k)]['rotations'][:self.T, 1:]
        orient = dataset[str(k)]['rotations'][:self.T, 0]
        affs = dataset[str(k)]['affective_features'][:self.T]
        spline, phase = Spline.extract_spline_features(dataset[str(k)]['spline'])
        spline = spline[:self.T]
        phase = phase[:self.T]
        # Root height: per-sequence mean over the conditioning prefix, plus per-frame deviation.
        z = dataset[str(k)]['trans_and_controls'][:, 1][:self.T]
        z_mean = np.mean(z[:self.prefix_length])
        z_dev = z - z_mean
        root_speed = dataset[str(k)]['trans_and_controls'][:, -1][:self.T]
        labels = dataset[str(k)]['labels'][:self.num_labels[0]]

        batch_pos[i] = pos
        batch_quat[i] = quat.reshape(self.T, -1)
        batch_orient[i] = orient.reshape(self.T, -1)
        batch_z_mean[i] = z_mean.reshape(-1, 1)
        batch_z_dev[i] = z_dev.reshape(self.T, -1)
        batch_root_speed[i] = root_speed.reshape(self.T, 1)
        batch_affs[i] = affs
        batch_spline[i] = spline
        batch_labels[i] = np.expand_dims(labels, axis=0)

    return batch_pos, batch_quat, batch_orient, batch_z_mean, batch_z_dev, \
        batch_root_speed, batch_affs, batch_spline, batch_labels
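# Standalone illustration of the height split used in the second return_batch variant:
# the mean root height over the conditioning prefix becomes a per-sequence scalar
# (batch_z_mean), and the model sees the per-frame deviation from it (batch_z_dev).
# The frame count and prefix_length below are toy values.
import numpy as np

z = np.linspace(0.9, 1.1, 16)        # root height per frame (toy data)
prefix_length = 4
z_mean = np.mean(z[:prefix_length])  # scalar conditioning feature
z_dev = z - z_mean                   # per-frame deviation fed to the model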
def get_predicted_features(self, pos_past, orient_past, traj, height, quat_pred, orient_pred):
    num_samples = quat_pred.shape[0]
    num_frames = quat_pred.shape[1]
    offsets = torch.from_numpy(self.joint_offsets).cuda().float(). \
        unsqueeze(0).unsqueeze(0).repeat(num_samples, num_frames, 1, 1)
    quat_pred = quat_pred.view(num_samples, num_frames, self.V - 1, -1)
    zeros = torch.zeros_like(orient_pred)
    # Root orientation is a rotation about the vertical axis; convert it to a quaternion
    # and prepend it to the predicted joint rotations.
    quats_world = quat_pred.clone()
    quats_world = torch.cat(
        (expmap_to_quaternion(
            torch.cat((zeros, orient_pred, zeros), dim=-1)).unsqueeze(-2),
         quats_world), dim=-2)
    pos_pred = torch.zeros((num_samples, num_frames, self.V, self.C)).cuda().float()
    pos_pred[:, :, 0, [0, 2]] = traj
    pos_pred[:, :, 0, 1] = height
    # Alternative (commented-out) variant that treats quat_pred as local rotations
    # accumulated along the kinematic chain:
    # for joint in range(self.V):
    #     if self.joint_parents[joint] == -1:
    #         quats_world[:, :, joint] = expmap_to_quaternion(
    #             torch.cat((zeros, orient_pred, zeros), dim=-1))
    #     else:
    #         pos_pred[:, :, joint] = qrot(quats_world[:, :, self.joint_parents[joint]],
    #                                      offsets[:, :, joint]) \
    #             + pos_pred[:, :, self.joint_parents[joint]]
    #         quats_world[:, :, joint] = qmul(quats_world[:, :, self.joint_parents[joint]],
    #                                         quat_pred[:, :, joint - 1])
    # Forward kinematics: rotate each joint offset by its world-space rotation and add
    # the parent joint's position.
    for joint in range(1, self.V):
        pos_pred[:, :, joint] = qrot(quats_world[:, :, joint], offsets[:, :, joint]) \
            + pos_pred[:, :, self.joint_parents[joint]]
    affs_pred = torch.tensor(
        MocapDataset.get_affective_features(
            pos_pred.detach().cpu().numpy())).cuda().float()
    spline = []
    for s in range(num_samples):
        data_pred_curr = dict()
        data_pred_curr['positions_world'] = torch.cat(
            (pos_past[s], pos_pred[s]), dim=0).detach().cpu().numpy()
        data_pred_curr['trajectory'] = data_pred_curr['positions_world'][:, 0]
        data_pred_curr['orientations'] = torch.cat(
            (orient_past[s], orient_pred[s]), dim=0).squeeze().detach().cpu().numpy()
        data_pred_curr['trans_and_controls'] = \
            MocapDataset.compute_translations_and_controls(data_pred_curr)
        spline.append(
            Spline.extract_spline_features(
                MocapDataset.compute_splines(data_pred_curr))[0][-1:])
    spline_pred = torch.from_numpy(np.stack(spline, axis=0)).cuda().float()
    return pos_pred, affs_pred, spline_pred
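# Standalone sketch of the root-orientation conversion above: with only the
# y-component non-zero, the exponential map (0, theta, 0) reduces to a rotation of
# theta about the vertical axis, i.e. q = (cos(theta/2), 0, sin(theta/2), 0) in the
# (w, x, y, z) convention assumed here for illustration.
import torch

theta = torch.tensor([0.3])          # root yaw in radians (toy value)
half = 0.5 * theta
q_root = torch.stack((torch.cos(half), torch.zeros_like(half),
                      torch.sin(half), torch.zeros_like(half)), dim=-1)  # (1, 4)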