} torch_losses["total_loss"] = torch_losses["l2_loss"] + \ torch_losses["means_loss"] + \ torch_losses["covariance_loss"] torch_losses_take_step( loss_dict=torch_losses, optimizer=opt_tm, loss_names=["means_loss", "covariance_loss", "l2_loss"]) roll_average( loss_dict=torch_losses, mets_dict=mets, metrics=["means_loss", "covariance_loss", "l2_loss", "total_loss"], iteration=iteration) if (iteration + 1) % n_stats_to_tensorboard == 0: crayon_ship_metrics( ccexp, mets, ["means_loss", "covariance_loss", "l2_loss", "total_loss"], iteration) if (iteration + 1) % n_save == 0: torch.save(neural_map.state_dict(), f"{save_dir}/neural_map_{iteration}.model") eval_iters.append(iteration) # final evaluation logger.info("Evaluating transport map at the different iterations") evaluate(eval_iters, neural_map, save_dir, export_dir, plots_dir, crc_final)
torch_losses["total_loss"] = torch_losses["l2_loss"] + \ torch_losses["critic_loss"] if iteration % n_critic == 0: torch_losses_take_step(loss_dict=torch_losses, optimizer=opt_tm, loss_names=["total_loss"]) else: torch_losses_take_step(loss_dict=torch_losses, optimizer=opt_critic, loss_names=["critic_loss"], minimize=False) roll_average(loss_dict=torch_losses, mets_dict=mets, metrics=["critic_loss", "l2_loss", "total_loss"], iteration=iteration) if (iteration + 1) % n_stats_to_tensorboard == 0: crayon_ship_metrics(ccexp, mets, ["critic_loss", "l2_loss", "total_loss"], iteration) if (iteration + 1) % n_save == 0: torch.save(transport_map.state_dict(), f"{save_dir}/neural_map_{iteration}.model") eval_iters.append(iteration) # final evaluation evaluate(eval_iters, transport_map, save_dir, export_dir, plots_dir, crc_final)