def forward(self, images):
    heatmaps, features = self.backbone(images)

    # keep only the spine heatmap (joint index 6) and soft-argmax it to 2D coordinates
    heatmapSpine = heatmaps[:, 6, :, :].unsqueeze(1)
    jointSpine2d = op.integrate_tensor_2d(heatmapSpine * self.heatmap_multiplier)

    features = self.process_features(features)

    return jointSpine2d, features
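# Not part of the modules in this file: a minimal sketch of a 2D soft-argmax,
# which is the operation op.integrate_tensor_2d is used for throughout these
# forward passes (turn each heatmap into an expected (x, y) pixel location).
# This is an illustrative re-implementation under assumed shapes, not the
# repo's own op.integrate_tensor_2d.
import torch


def soft_argmax_2d_sketch(heatmaps):
    # heatmaps: [batch, n_joints, H, W] unnormalized scores
    batch, n_joints, h, w = heatmaps.shape
    probs = torch.softmax(heatmaps.reshape(batch, n_joints, -1), dim=2)
    probs = probs.reshape(batch, n_joints, h, w)
    xs = torch.arange(w, dtype=probs.dtype, device=probs.device)
    ys = torch.arange(h, dtype=probs.dtype, device=probs.device)
    x = (probs.sum(dim=2) * xs).sum(dim=2)  # marginalize over rows, expectation over columns
    y = (probs.sum(dim=3) * ys).sum(dim=2)  # marginalize over columns, expectation over rows
    return torch.stack([x, y], dim=2)       # [batch, n_joints, 2]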
def forward(self, images, prjMats):
    images = images.permute(0, 3, 1, 2)  # NHWC -> NCHW

    ######################################
    heatmaps, features, confidence = self.backbone(images)  # confidence: n*17
    joints2d = op.integrate_tensor_2d(heatmaps * self.heatmap_multiplier)  # n*17*2

    n_views = images.shape[0]
    confidenceEx = confidence.unsqueeze(2).unsqueeze(3).expand(
        n_views, 17, 2, 4)  # [n*17] -> [n*17*1*1] -> [n*17*2*4]
    joints2d = joints2d.unsqueeze(3).expand(
        n_views, 17, 2, 4)  # [n*17*2] -> [n*17*2*1] -> [n*17*2*4]

    prjMats1 = prjMats[:, 2:3].expand(n_views, 2, 4)  # third projection rows: [n*1*4] -> [n*2*4]
    prjMats2 = prjMats[:, :2]  # first two projection rows: [n*2*4]
    prjMats1 = prjMats1.unsqueeze(1).expand(
        n_views, 17, 2, 4)  # [n*2*4] -> [n*1*2*4] -> [n*17*2*4]
    prjMats2 = prjMats2.unsqueeze(1).expand(
        n_views, 17, 2, 4)  # [n*2*4] -> [n*1*2*4] -> [n*17*2*4]

    # confidence-weighted DLT rows: w * (u * P_3 - P_1) and w * (v * P_3 - P_2)
    A = confidenceEx * (joints2d * prjMats1 - prjMats2)  # [n*17*2*4]
    ######################################

    return A, confidence
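# Not part of the module above: a minimal sketch of how the confidence-weighted
# DLT rows collected in A could be solved for one joint. The homogeneous
# least-squares solution of A x = 0 is the right singular vector belonging to
# the smallest singular value. The function name and the [2 * n_views, 4] input
# layout are assumptions for illustration, not this repo's multiview API.
import torch


def triangulate_joint_dlt_sketch(A_joint):
    # A_joint: [2 * n_views, 4] stacked rows  w * (u * P_3 - P_1)  and  w * (v * P_3 - P_2)
    u, s, v = torch.svd(A_joint)
    point_homo = v[:, -1]                  # right singular vector of the smallest singular value
    return point_homo[:3] / point_homo[3]  # dehomogenize -> (x, y, z)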
def forward(self, images):
    # preprocess
    #########################################
    images = images[:, :, :, [2, 1, 0]]  # reverse the channel order (BGR <-> RGB)
    images = images.permute(0, 3, 1, 2).float()  # NHWC -> NCHW
    # scale and shift, roughly equivalent to (pixel - 114.4) / 57.6
    images *= 0.017353650
    images -= 1.986020923
    #########################################

    heatmaps, _, alg_confidences = self.backbone(images)

    # soft-argmax, then rescale from the 96x96 heatmap to the 384x384 image
    keypoints_2d = op.integrate_tensor_2d(
        heatmaps * self.heatmap_multiplier) / 96 * 384

    return keypoints_2d, alg_confidences
def forward(self, images, proj_matricies):
    device = images.device
    batch_size, n_views = images.shape[:2]

    # reshape n_views dimension into the batch dimension for the backbone
    images = images.view(-1, *images.shape[2:])

    # forward backbone
    heatmaps, features = self.backbone(images)
    keypoints_2d = op.integrate_tensor_2d(
        heatmaps * self.heatmap_multiplier, self.heatmap_softmax)

    # reshape back
    keypoints_2d = keypoints_2d.view(batch_size, n_views,
                                     *keypoints_2d.shape[1:])

    # algebraic triangulation (no confidences)
    keypoints_3d_Alg = multiview.triangulate_batch_of_points(
        proj_matricies, keypoints_2d,
        # confidences_batch=alg_confidences
    )

    # ALG ################################
    # build coord volumes around the algebraic estimate
    coord_volumes = torch.zeros(batch_size, self.volume_size, self.volume_size,
                                self.volume_size, 3,
                                device=device)  # Bx64x64x64x3
    for batch_i in range(batch_size):
        # center the cuboid on this sample's algebraic estimate
        keypoints_3d = keypoints_3d_Alg[batch_i].to('cpu').detach().numpy().copy()
        base_point = keypoints_3d[6, :3]  # joint 6 (spine) as the cuboid anchor

        # build cuboid
        sides = np.array([self.cuboid_side, self.cuboid_side, self.cuboid_side])
        position = base_point - sides / 2

        # build coord volume
        xxx, yyy, zzz = torch.meshgrid(
            torch.arange(self.volume_size, device=device),
            torch.arange(self.volume_size, device=device),
            torch.arange(self.volume_size, device=device))
        grid = torch.stack([xxx, yyy, zzz], dim=-1).type(torch.float)
        grid = grid.reshape((-1, 3))

        grid_coord = torch.zeros_like(grid)
        grid_coord[:, 0] = position[0] + (sides[0] / (self.volume_size - 1)) * grid[:, 0]
        grid_coord[:, 1] = position[1] + (sides[1] / (self.volume_size - 1)) * grid[:, 1]
        grid_coord[:, 2] = position[2] + (sides[2] / (self.volume_size - 1)) * grid[:, 2]

        coord_volumes[batch_i] = grid_coord.reshape(
            self.volume_size, self.volume_size, self.volume_size, 3)

    # process features before unprojecting
    # (features still have the flattened [batch * n_views, C, H, W] layout here)
    features = self.process_features(features)
    features = features.view(batch_size, n_views, *features.shape[1:])

    # lift to volume
    volumes = op.unproject_heatmaps(
        features, proj_matricies, coord_volumes,
        volume_aggregation_method=self.volume_aggregation_method)

    # integral 3d
    volumes = self.volume_net(volumes)
    vol_keypoints_3d = op.integrate_tensor_3d_with_coordinates(
        volumes * self.volume_multiplier, coord_volumes,
        softmax=self.volume_softmax)

    return vol_keypoints_3d, features, volumes, coord_volumes
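# Not part of the module above: a conceptual sketch of the 3D soft-argmax over
# a coordinate volume that a call like op.integrate_tensor_3d_with_coordinates
# is expected to perform here (softmax the volume into a probability grid, then
# take the expectation of the voxel world coordinates). Shapes and names are
# illustrative assumptions, not the repo's actual implementation.
import torch


def soft_argmax_3d_sketch(volumes, coord_volumes):
    # volumes:       [batch, n_joints, D, D, D] unnormalized scores
    # coord_volumes: [batch, D, D, D, 3] world coordinates of every voxel
    batch, n_joints = volumes.shape[:2]
    probs = torch.softmax(volumes.reshape(batch, n_joints, -1), dim=2)  # [B, J, D^3]
    coords = coord_volumes.reshape(batch, 1, -1, 3)                     # [B, 1, D^3, 3]
    return (probs.unsqueeze(-1) * coords).sum(dim=2)                    # [B, J, 3]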
def forward(self, images, proj_matricies, batch):
    device = images.device
    batch_size, n_views = images.shape[:2]

    # reshape n_views dimension to batch dimension
    images = images.view(-1, *images.shape[2:])

    # forward backbone and integral
    if self.use_confidences:
        # confidences are provided directly by the backbone (pose_resnet)
        heatmaps, _, alg_confidences, _ = self.backbone(images)
    else:
        heatmaps, _, _, _ = self.backbone(images)
        alg_confidences = torch.ones(batch_size * n_views,
                                     heatmaps.shape[1]).type(torch.float).to(device)

    # view() in torch is just a reshape
    heatmaps_before_softmax = heatmaps.view(batch_size, n_views,
                                            *heatmaps.shape[1:])

    # soft argmax
    keypoints_2d, heatmaps = op.integrate_tensor_2d(
        heatmaps * self.heatmap_multiplier, self.heatmap_softmax)

    # reshape back
    images = images.view(batch_size, n_views, *images.shape[1:])
    heatmaps = heatmaps.view(batch_size, n_views, *heatmaps.shape[1:])
    keypoints_2d = keypoints_2d.view(batch_size, n_views,
                                     *keypoints_2d.shape[1:])
    alg_confidences = alg_confidences.view(batch_size, n_views,
                                           *alg_confidences.shape[1:])

    # norm confidences
    alg_confidences = alg_confidences / alg_confidences.sum(dim=1, keepdim=True)
    alg_confidences = alg_confidences + 1e-5  # for numerical stability

    # calculate shapes
    image_shape = tuple(images.shape[3:])
    batch_size, n_views, n_joints, heatmap_shape = (
        heatmaps.shape[0], heatmaps.shape[1], heatmaps.shape[2],
        tuple(heatmaps.shape[3:]))

    # upscale keypoints_2d, because image shape != heatmap shape
    keypoints_2d_transformed = torch.zeros_like(keypoints_2d)
    keypoints_2d_transformed[:, :, :, 0] = keypoints_2d[:, :, :, 0] * (
        image_shape[1] / heatmap_shape[1])
    keypoints_2d_transformed[:, :, :, 1] = keypoints_2d[:, :, :, 1] * (
        image_shape[0] / heatmap_shape[0])
    keypoints_2d = keypoints_2d_transformed

    # triangulate
    try:
        keypoints_3d = multiview.triangulate_batch_of_points(
            proj_matricies, keypoints_2d, confidences_batch=alg_confidences)
    except RuntimeError as e:
        print("Error: ", e)
        print("confidences =", alg_confidences)
        print("proj_matricies = ", proj_matricies)
        print("keypoints_2d =", keypoints_2d)
        exit()

    return keypoints_3d, keypoints_2d, heatmaps, alg_confidences
def forward(self, images):
    heatmaps, features = self.backbone(images)
    joints2d = op.integrate_tensor_2d(heatmaps * self.heatmap_multiplier)
    return joints2d
def forward(self, images):
    images = images.permute(0, 3, 1, 2)  # NHWC -> NCHW
    heatmaps, _, alg_confidences = self.backbone(images)
    keypoints_2d = op.integrate_tensor_2d(heatmaps * self.heatmap_multiplier)
    return keypoints_2d, alg_confidences
def forward(self, images):
    device = images.device
    heatmaps, _, alg_confidences = self.backbone(images)
    keypoints_2d = op.integrate_tensor_2d(heatmaps * self.heatmap_multiplier,
                                          self.heatmap_softmax)
    return keypoints_2d, alg_confidences