# How often (in iterations) rolling stats are pushed to tensorboard.
n_stats_to_tensorboard = args.crayon_send_stats_iters
logger.info(f"Sending stats to tensorboard every {n_stats_to_tensorboard} iterations")

# Checkpoint cadence: save every `n_save` iterations so the run produces
# exactly `args.n_models_saved` model snapshots over `args.n_train` iterations.
n_save: int = round(args.n_train / args.n_models_saved)
logger.info(f"Save models every {n_save} iterations, for a total of {args.n_models_saved}")

# Warm-start phase: drive both potential networks toward outputting 0
# everywhere by regressing their outputs against the constant 0.0.
for iteration, data_dict in enumerate(dataloader_init):
    # Drop the extra leading batch dim added by the DataLoader.
    X = data_dict["X"].squeeze(dim=0)
    Y = data_dict["Y"].squeeze(dim=0)
    torch_losses = {
        "potential_u_initialization_loss": identity_loss_fn(0.0, potential_u(X)),
        "potential_v_initialization_loss": identity_loss_fn(0.0, potential_v(Y)),
    }
    torch_losses_take_step(
        loss_dict=torch_losses,
        optimizer=opt_potential,
        loss_names=[
            "potential_u_initialization_loss",
            "potential_v_initialization_loss",
        ],
    )
    roll_average(
        loss_dict=torch_losses,
        mets_dict=mets,
        metrics=[
            "potential_u_initialization_loss",
            "potential_v_initialization_loss",
        ],
        iteration=iteration,
    )
    if (iteration + 1) % n_stats_to_tensorboard == 0:
        # BUG FIX: the original shipped "identity_loss", which is never
        # computed in this loop; ship the potential-initialization metrics
        # that are actually tracked above. (Call tail reconstructed from the
        # parallel identity-init loop — original was truncated mid-call.)
        crayon_ship_metrics(
            ccexp,
            mets,
            [
                "potential_u_initialization_loss",
                "potential_v_initialization_loss",
            ],
            iteration,
        )
# Frequency (in iterations) of shipping rolling stats to tensorboard.
n_stats_to_tensorboard = args.crayon_send_stats_iters
logger.info(f"Sending stats to tensorboard every {n_stats_to_tensorboard} iterations")

# Checkpoint cadence chosen so the run yields args.n_models_saved models.
n_save: int = round(args.n_train / args.n_models_saved)
logger.info(f"Save models every {n_save} iterations, for a total of {args.n_models_saved}")

# Pre-train the transport map toward the identity: encourage T(X) ≈ X.
for iteration, data_dict in enumerate(dataloader_init):
    # Strip the leading batch dimension added by the DataLoader.
    X = data_dict["X"].squeeze(dim=0)
    TX = neural_map(X)
    identity_loss = identity_loss_fn(X, TX)
    torch_losses = {"identity_loss": identity_loss}
    torch_losses_take_step(
        loss_dict=torch_losses, optimizer=opt_tm, loss_names=["identity_loss"]
    )
    roll_average(
        loss_dict=torch_losses,
        mets_dict=mets,
        metrics=["identity_loss"],
        iteration=iteration,
    )
    should_ship = (iteration + 1) % n_stats_to_tensorboard == 0
    if should_ship:
        crayon_ship_metrics(ccexp, mets, ["identity_loss"], iteration)

# Iterations at which evaluation will be run (populated later).
eval_iters: List[int] = []
# How often (in iterations) stats get sent to tensorboard.
n_stats_to_tensorboard = args.crayon_send_stats_iters
logger.info(f"Sending stats to tensorboard every {n_stats_to_tensorboard} iterations")

# Save a checkpoint every n_save iterations, yielding args.n_models_saved total.
n_save: int = round(args.n_train / args.n_models_saved)
logger.info(f"Save models every {n_save} iterations, for a total of {args.n_models_saved}")

# Identity warm-start for the map network: push T(X) toward X.
for iteration, data_dict in enumerate(dataloader_init):
    # Remove the singleton batch dimension introduced by the DataLoader.
    X = data_dict["X"].squeeze(dim=0)
    TX = neural_map(X)
    torch_losses = {"identity_loss": identity_loss_fn(X, TX)}
    torch_losses_take_step(
        loss_dict=torch_losses,
        optimizer=opt_tm,
        loss_names=["identity_loss"],
    )
    roll_average(
        loss_dict=torch_losses,
        mets_dict=mets,
        metrics=["identity_loss"],
        iteration=iteration,
    )
    is_report_iter = (iteration + 1) % n_stats_to_tensorboard == 0
    if is_report_iter:
        crayon_ship_metrics(ccexp, mets, ["identity_loss"], iteration)

# iterations to evaluate on