################

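# Trainer for the standard autoregressive model: snapshots are written every
# num_iterations // 10 steps and metrics are logged to TensorBoard.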
trainer = autoregressive_train.AutoregressiveTrainer(
    model=model,
    data_loader=loader,
    params=trainer_params,
    snapshot_path=working_dir + '/snapshots',
    snapshot_name=args.run_name,
    snapshot_interval=args.num_iterations // 10,
    snapshot_exec_template=sbatch_executable,
    device=device,
    # Alternative: basic logger with validation disabled
    # logger=model_logging.Logger(validation_interval=None),
    logger=model_logging.TensorboardLogger(
        log_interval=500,
        validation_interval=1000,
        generate_interval=5000,
        log_dir=working_dir + '/logs/' + args.run_name,
        print_output=True,
    ))
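# Optionally resume from a previously saved trainer state.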
if args.restore is not None:
    trainer.load_state(checkpoint)

print()
print("Model:", model.__class__.__name__)
print("Hyperparameters:", json.dumps(model.hyperparams, indent=4))
print("Trainer:", trainer.__class__.__name__)
print(
    "Training parameters:",
    json.dumps(
        {key: value for key, value in trainer.params.items()},
        indent=4))

################

# VAE variant of the autoregressive model.
model = autoregressive_model.AutoregressiveVAEFR(channels=args.channels, dropout_p=args.dropout_p)
model.to(device)

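# Trainer for the VAE model; same snapshot and TensorBoard logging setup as above.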
trainer = autoregressive_train.AutoregressiveVAETrainer(
    model=model,
    data_loader=loader,
    params=trainer_params,
    snapshot_path=working_dir + '/snapshots',
    snapshot_name=run_name,
    snapshot_interval=args.num_iterations // 10,
    snapshot_exec_template=sbatch_executable,
    device=device,
    # Alternative: basic logger with validation disabled
    # logger=model_logging.Logger(validation_interval=None),
    logger=model_logging.TensorboardLogger(
        log_interval=500,
        validation_interval=1000,
        generate_interval=5000,
        log_dir=working_dir + '/logs/' + run_name
    )
)
if args.restore is not None:
    trainer.load_state(checkpoint)
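# Optional command-line overrides for the lagging-inference schedule.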
if args.no_lag_inf:
    trainer.params['lagging_inference'] = False
if args.lag_inf_max_steps is not None:
    trainer.params['lag_inf_inner_loop_max_steps'] = args.lag_inf_max_steps

print("Hyperparameters:", json.dumps(model.hyperparams, indent=4))
print("Training parameters:", json.dumps(trainer.params, indent=4))
print("Num trainable parameters:", model.parameter_count())

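# Run training for the requested number of iterations.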
trainer.train(steps=args.num_iterations)