from collections import defaultdict

import numpy as np
import torch
from scipy.stats import gaussian_kde

import plot_utils  # repo-local helpers (first_nonzero, integrate_trajectory)


def compute_fse(predicted_trajs, gt_traj):
    # Ground-truth trajectories are zero-padded; find the last valid timestep.
    first_nz = plot_utils.first_nonzero(np.sum(gt_traj.cpu().numpy(), axis=2)[0, ::-1], axis=0)
    if first_nz < 0:
        return None

    last_gt_idx = gt_traj.shape[1] - first_nz
    final_error = torch.norm(predicted_trajs[:, last_gt_idx - 1] - gt_traj[:, last_gt_idx - 1], dim=1)
    return final_error
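# Hypothetical usage sketch for compute_fse (not from the original script).
# Shapes are assumptions inferred from the indexing above: a batch of sampled
# trajectories vs. a single, possibly zero-padded, ground-truth trajectory.
def _example_compute_fse():
    predicted_trajs = torch.randn(20, 12, 2)  # 20 samples x 12 timesteps x (x, y)
    gt_traj = torch.randn(1, 12, 2)           # ground truth; zeros after the last valid step
    fse = compute_fse(predicted_trajs, gt_traj)
    if fse is not None:
        print(fse.shape)  # torch.Size([20]): one final-displacement error per sample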
def get_kde_log_likelihoods(data_precondition, dataset_name,
                            our_preds_most_likely_list, our_preds_list,
                            sgan_preds_list, sgan_gt_list,
                            eval_inputs, eval_data_dict,
                            data_ids, t_predicts, random_scene_idxs, num_runs):
    # Note: `max_speed` and `method_names` are module-level values defined elsewhere in this script.
    eval_dt = eval_data_dict['dt']
    all_methods_preds_dict = defaultdict(list)
    gt_dicts = list()
    for run in range(num_runs):
        sgan_preds = sgan_preds_list[run]
        our_most_likely_preds = our_preds_most_likely_list[run]
        our_full_preds = our_preds_list[run]

        random_scene_idx = random_scene_idxs[run]
        data_id = data_ids[random_scene_idx]
        t_predict = t_predicts[random_scene_idx] - 1
        print(run, data_id, t_predict)

        sgan_preds = {key: value.cpu().numpy() for key, value in sgan_preds.items()}
        all_methods_preds_dict['sgan'].append(sgan_preds)

        methods_list = ['our_full', 'our_most_likely']
        curr_gt = dict()
        for idx, preds_dict in enumerate([our_full_preds, our_most_likely_preds]):
            curr_preds = dict()
            for key, value in preds_dict.items():
                curr_state_val = eval_inputs[key][data_id, t_predict]
                # Predictions are velocities; integrate them into positions.
                pred_trajs = plot_utils.integrate_trajectory(
                    value.cpu().numpy(), [0, 1],
                    curr_state_val.cpu().numpy(), [0, 1],
                    eval_dt,
                    output_limit=max_speed,
                    velocity_in=True).astype(np.float32)
                curr_preds[key] = pred_trajs[:, 0]
                if idx == 1:
                    curr_gt[key] = eval_inputs[key][[data_id], t_predict + 1:t_predict + 1 + 12, :2].cpu().numpy()

            all_methods_preds_dict[methods_list[idx]].append(curr_preds)

        gt_dicts.append(curr_gt)

    detailed_ll_dict = {'data_precondition': list(),
                        'dataset': list(),
                        'method': list(),
                        'run': list(),
                        'timestep': list(),
                        'node': list(),
                        'log-likelihood': list()}

    sgan_lls = list()
    our_full_lls = list()
    our_most_likely_lls = list()
    log_pdf_lower_bound = -20
    for run in range(num_runs):
        sgan_preds = all_methods_preds_dict['sgan'][run]
        our_full_preds = all_methods_preds_dict['our_full'][run]
        our_most_likely_preds = all_methods_preds_dict['our_most_likely'][run]
        gt_dict = gt_dicts[run]

        for node in sgan_preds.keys():
            # Ground-truth trajectories are zero-padded; only evaluate valid timesteps.
            first_nz = plot_utils.first_nonzero(np.sum(gt_dict[node], axis=2)[0, ::-1], axis=0)
            if first_nz < 0:
                continue
            num_timesteps = gt_dict[node].shape[1] - first_nz

            sgan_ll = 0.0
            our_full_ll = 0.0
            our_most_likely_ll = 0.0
            for timestep in range(num_timesteps):
                curr_gt = gt_dict[node][:, timestep]

                # Fit a KDE to each method's samples at this timestep.
                sgan_scipy_kde = gaussian_kde(sgan_preds[node][:, timestep].T)
                our_full_scipy_kde = gaussian_kde(our_full_preds[node][:, timestep].T)
                our_most_likely_scipy_kde = gaussian_kde(our_most_likely_preds[node][:, timestep].T)

                # We need [0] because it's a (1,)-shaped numpy array.
                sgan_log_pdf = np.clip(sgan_scipy_kde.logpdf(curr_gt.T),
                                       a_min=log_pdf_lower_bound, a_max=None)[0]
                our_full_log_pdf = np.clip(our_full_scipy_kde.logpdf(curr_gt.T),
                                           a_min=log_pdf_lower_bound, a_max=None)[0]
                our_most_likely_log_pdf = np.clip(our_most_likely_scipy_kde.logpdf(curr_gt.T),
                                                  a_min=log_pdf_lower_bound, a_max=None)[0]

                for idx, result in enumerate([sgan_log_pdf, our_full_log_pdf, our_most_likely_log_pdf]):
                    detailed_ll_dict['data_precondition'].append(data_precondition)
                    detailed_ll_dict['dataset'].append(dataset_name)
                    detailed_ll_dict['method'].append(method_names[idx])
                    detailed_ll_dict['run'].append(run)
                    detailed_ll_dict['timestep'].append(timestep)
                    detailed_ll_dict['node'].append(str(node))
                    detailed_ll_dict['log-likelihood'].append(result)

                # Accumulate the per-node mean log-likelihood over valid timesteps.
                sgan_ll += sgan_log_pdf / num_timesteps
                our_full_ll += our_full_log_pdf / num_timesteps
                our_most_likely_ll += our_most_likely_log_pdf / num_timesteps

            sgan_lls.append(sgan_ll)
            our_full_lls.append(our_full_ll)
            our_most_likely_lls.append(our_most_likely_ll)

    return sgan_lls, our_full_lls, our_most_likely_lls, detailed_ll_dict
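# Minimal standalone sketch of the per-timestep KDE log-likelihood used above,
# assuming 2D (x, y) positions; the sample values here are illustrative only.
def _example_kde_log_likelihood():
    samples = np.random.randn(100, 2)      # 100 sampled positions for one timestep
    gt_position = np.array([[0.1, -0.3]])  # ground-truth position, shape (1, 2)

    kde = gaussian_kde(samples.T)          # gaussian_kde expects a (dims, num_samples) array
    log_pdf_lower_bound = -20
    # logpdf returns a (1,)-shaped array; clip to avoid -inf for far-off ground truths.
    ll = np.clip(kde.logpdf(gt_position.T), a_min=log_pdf_lower_bound, a_max=None)[0]
    return ll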