# Display Options. log_str = ("Autoencoder-{size} Training\n" " Running on {num_ipus} IPUs\n" " Dataset {training_data_file}\n" " Precision {precision}\n" " Logging to {logdir}\n" " Stochastic Rounding {prng}\n" "Training Graph\n" " Optimizer {optimizer}\n" " Batch Size {batch_size}\n" " Epochs {epochs}\n" " Base Learning Rates 2^{base_learning_rates}\n") if opts.loss_scaling: log_str += " Loss Scaling {loss_scaling}\n" if opts.weight_decay: log_str += " Weight Decay {weight_decay}\n" print(log_str.format(**vars(opts))) # load data print("Loading training data") training_data = AutoencoderData(data_file_name=opts.training_data_file) print("Users: {}".format(training_data.size)) print("Items: {}".format(training_data.input_size)) opts.input_size = training_data.input_size train_process_init(opts, training_data)
# NOTE(review): this chunk starts mid-flow — the `log_str +=` below is
# presumably the body of an `if opts.loss_scaling:` guard that lies just
# before the visible start of this fragment (the sibling variant of this
# script has exactly that guard); confirm against the full file.
log_str += " Loss Scaling {loss_scaling}\n"
if opts.weight_decay:
    log_str += " Weight Decay {weight_decay}\n"
# Describe the validation graph only when validation is enabled.
if not opts.no_validation:
    log_str += ("Validation Graph\n"
                " Dataset {validation_data_file}\n"
                " Batch Size {validation_batch_size}\n")
if not opts.testing_on_checkpoint:
    log_str += "Checkpoint Path {checkpoint_path}\n"

# Effective learning rate: 2^base_learning_rate scaled linearly with the
# batch size (base_learning_rate is an exponent, matching the
# "2^{...}" notation used in the log string).
opts.learning_rate = (2**opts.base_learning_rate) * opts.batch_size

# The {placeholders} above are filled from the attributes of `opts`.
print(log_str.format(**vars(opts)))

# load data
print("Loading training data")
training_data = AutoencoderData(data_file_name=opts.training_data_file)
# NOTE(review): `.size` appears to be the number of users (rows) and
# `.input_size` the number of items (columns) — inferred from the labels
# printed below; confirm against AutoencoderData.
print("Users: {}".format(training_data.size))
print("Items: {}".format(training_data.input_size))

# The validation set is constructed relative to the training set
# (training_data is passed in), presumably so both share the same item
# vocabulary/indexing — verify against AutoencoderData.
print("Loading evaluation data")
valid_data = AutoencoderData(data_file_name=opts.validation_data_file,
                             training_data=training_data)
print("Users: {}".format(valid_data.size))
print("Items: {}".format(valid_data.input_size))

# Sanity check: both datasets must agree on the item dimension, otherwise
# the model's input layer cannot serve both graphs.
if training_data.input_size != valid_data.input_size:
    raise ValueError(
        'Number of items for training data and validation data must be'
        ' equal. Got {} and {}.'.format(training_data.input_size,
                                        valid_data.input_size))