ordinal_model.result], feed_dict={input_images: batch_images_np, input_relation_table: batch_relation_table_np, input_loss_table_log: batch_loss_table_log_np, input_loss_table_pow: batch_loss_table_pow_np, input_batch_size: configs.batch_size}) print("Iter: {:07d}. Loss : {:07f}\n\n".format( data_index.val, loss)) print((len(img_path_for_show) * "{}\n").format( *zip(img_path_for_show, label_path_for_show))) # multiply the scale depth = depth - depth[:, 0] depth = cur_depth_scale * depth depth_eval.add(np.array(gt_depth_arr), depth) depth_eval.printMean() ############# evaluate the coords recovered from the gt 2d and gt root depth for b in range(configs.batch_size): c_j_2d, c_j_3d, _ = volume_utils.local_to_global( depth[b], depth_root_arr[b], crop_joints_2d_arr[b], source_txt_arr[b], center_arr[b], scale_arr[b]) coords_eval.add(gt_joints_3d_arr[b], c_j_3d) coords_eval.printMean() print("\n\n") depth_eval.save("../eval_result/ord_3_1/depth_eval_{}w.npy".format( cur_model_iterations / 10000)) coords_eval.save( "../eval_result/ord_3_1/coord_eval_{}w.npy".format( cur_model_iterations / 10000))
sys.stdout.write("Mean: ")
mean_depth_eval.printMean()

##### The raw results
# Root-center the raw depths per sample, then rescale; 2D joints are mapped
# back to image-pixel units via the configured scale factor.
raw_depth = cur_depth_scale * (raw_depth - raw_depth[:, 0][:, np.newaxis])
raw_joints_2d = raw_joints_2d * configs.coords_2d_scale

raw_depth_eval.add(np.array(gt_depth_arr), raw_depth)
sys.stdout.write("raw: ")
raw_depth_eval.printMean()

############# evaluate the coords recovered from the pd 2d and gt root depth
for idx in range(configs.batch_size):
    # Mean-prediction branch: lift to global 3D and score root-relative.
    mean_c_j_2d, mean_c_j_3d, _ = volume_utils.local_to_global(
        mean_depth[idx], depth_root_arr[idx], mean_joints_2d[idx],
        source_txt_arr[idx], center_arr[idx], scale_arr[idx])
    mean_coords_eval.add(
        gt_joints_3d_arr[idx] - gt_joints_3d_arr[idx][0],
        mean_c_j_3d - mean_c_j_3d[0])

    # Raw-prediction branch: same lifting, same root-relative comparison.
    raw_c_j_2d, raw_c_j_3d, _ = volume_utils.local_to_global(
        raw_depth[idx], depth_root_arr[idx], raw_joints_2d[idx],
        source_txt_arr[idx], center_arr[idx], scale_arr[idx])
    raw_coords_eval.add(
        gt_joints_3d_arr[idx] - gt_joints_3d_arr[idx][0],
        raw_c_j_3d - raw_c_j_3d[0])

sys.stdout.write("Mean: ")
mean_coords_eval.printMean()
sys.stdout.write("Raw: ")
map(lambda x: volume_utils.voxel_z_centers[x], mean_vol_joints[:, :, 2].tolist())) mean_pd_coords_2d = mean_vol_joints[:, :, 0: 2] * configs.coords_2d_scale raw_vol_joints = raw_vol_joints.astype(np.int32) raw_pd_depth = np.array( map(lambda x: volume_utils.voxel_z_centers[x], raw_vol_joints[:, :, 2].tolist())) raw_pd_coords_2d = raw_vol_joints[:, :, 0: 2] * configs.coords_2d_scale # ############# evaluate the coords recovered from the gt 2d and gt root depth for b in range(configs.batch_size): mean_c_j_2d_pd, mean_c_j_3d_pd, _ = volume_utils.local_to_global( mean_pd_depth[b], depth_root_arr[b], mean_pd_coords_2d[b], source_txt_arr[b], center_arr[b], scale_arr[b]) raw_c_j_2d_pd, raw_c_j_3d_pd, _ = volume_utils.local_to_global( raw_pd_depth[b], depth_root_arr[b], raw_pd_coords_2d[b], source_txt_arr[b], center_arr[b], scale_arr[b]) #### Use the mean skeleton to evaluate opt_mean_c_j_3d_pd = np.reshape( skeleton_opt.opt( volume_utils.recover_2d( mean_pd_coords_2d[b], scale=scale_arr[b], center=center_arr[b]).flatten().tolist(), mean_pd_depth[b].flatten().tolist(), cam_matrix_arr[b].flatten().tolist()), [-1, 3])
m_btn_callback.reset()

# cur_index = data_index.val
# NOTE(review): hard-coded sample index — looks like a deliberate choice to
# inspect one specific frame; confirm before restoring data_index.val.
cur_index = 628
cropped_img = cv2.imread(images_file_fn(cur_index))
cur_label = np.load(annots_file_fn(cur_index)).tolist()

joints_3d = cur_label["joints_3d"]
joints_2d = cur_label["joints_2d"]

# Split the 3D joints into a root depth plus root-relative depths, then lift
# everything back into the global frame of the original (uncropped) image.
cur_depth = joints_3d[:, 2] - joints_3d[0, 2]
root_depth = joints_3d[0, 2]

joints_2d, joints_3d, proj_mat = volume_utils.local_to_global(
    cur_depth, root_depth, joints_2d, cur_label["source"],
    cur_label["center"], cur_label["scale"])
visualBox.setProjMat(proj_mat)

# Paste the crop back into the full image and overlay the 2D skeleton.
cur_img = volume_utils.put_cropped_back(
    cropped_img, cur_label["center"], cur_label["scale"])
cur_img = display_utils.drawLines(
    cur_img, joints_2d, indices=pose_defs.h36m_pose)
cur_img = display_utils.drawPoints(cur_img, joints_2d)
# print(joints_3d - joints_3d[0])
# joints_3d -= joints_3d[0] # minus the root