def __init__(self, params_dict):
    """Set up an MNIST cWGAN training run.

    Creates a fresh save directory, builds the model from the supplied
    hyperparameters, and loads the MNIST dataset.

    Args:
        params_dict (dict): Contains training/model parameters
    """
    self.save_dir = file_utils.make_save_directory("cWGAN")

    # Training-loop hyperparameters.
    self.num_critic_iters = params_dict["num_critic_iters"]
    self.batch_size = params_dict["batch_size"]

    # Model hyperparameters are consumed directly by the cWGAN_mnist ctor.
    self.model = cWGAN_mnist(
        params_dict["clip_value"],
        params_dict["noise_dims"],
        params_dict["gen_lr"],
        params_dict["critic_lr"],
        params_dict["gp_weight"],
    )

    self.data = data_utils.load_mnist_data()
    self.epochs = params_dict["epochs"]
    # data[0] holds the training inputs; its length drives iteration counts.
    self.num_training_examples = len(self.data[0])
    self.weight_saving_interval = params_dict["weight_saving_interval"]

    # Per-iteration histories, appended to during training.
    self.critic_losses = []
    self.generator_losses = []
    self.wass_estimates = []
def __init__(self, params_dict):
    """Set up a cWGAN training run on jet data.

    Creates a save directory, builds the cWGAN model from the supplied
    hyperparameters, and loads the jet dataset using the data type and
    scaling scheme named in the configuration. Prints an explanatory
    message and exits with status 1 when the configuration names an
    unknown data type or scaling scheme.

    Args:
        params_dict (dict): Contains training/model parameters
    """
    self.save_dir = file_utils.make_save_directory("cWGAN")
    self.num_critic_iters = params_dict["num_critic_iters"]
    self.batch_size = params_dict["batch_size"]

    # Model hyperparameters, consumed by the cWGAN constructor.
    gen_lr = params_dict["gen_lr"]
    critic_lr = params_dict["critic_lr"]
    optimizer = params_dict["optimizer"]
    noise_dims = params_dict["noise_dims"]
    gp_weight = params_dict["gp_weight"]
    # Optional warm start from previously saved weights.
    load_previous = params_dict["load_previous"]
    weights_path = params_dict["weights_path"]
    iteration = params_dict["iteration"]
    self.model = cWGAN(
        noise_dims,
        optimizer,
        gen_lr,
        critic_lr,
        gp_weight,
        load_previous,
        weights_path,
        iteration,
    )

    data_path = params_dict["data_path"]
    if params_dict["data_type"] == "angular":
        # Angular four-momenta support three scaling schemes.
        if params_dict["data_scaling"] == "inverse":
            self.data = data_utils.load_jet_data_inverse_scaling(data_path)
        elif params_dict["data_scaling"] == "log":
            self.data = data_utils.load_jet_data_log_scaling(data_path)
        elif params_dict["data_scaling"] == "minmax":
            self.data = data_utils.load_jet_data(data_path)
        else:
            print("There was an error loading the data")
            print(
                "Please specify a data scaling scheme in your configuration file: The options are: inverse, log, and minmax"
            )
            sys.exit(1)
    elif params_dict["data_type"] == "cartesian":
        # Cartesian four-momenta go through the log-scaled loader.
        self.data = data_utils.load_jet_data_log_scaling_cartesian(data_path)
    else:
        print("There was an error loading the data")
        # Fixed typo in this user-facing message ("your are" -> "you are").
        print(
            "Please specify the form of the data you are training on, either:"
        )
        print(" angular: Four-momenta in form (Pt, Eta, Phi, E)")
        print(" cartesian: Four-momenta in form (Px, Py, Pz, E)")
        sys.exit(1)

    self.epochs = params_dict["epochs"]
    # data[0] holds the training inputs; its length drives iteration counts.
    self.num_training_examples = len(self.data[0])
    self.weight_saving_interval = params_dict["weight_saving_interval"]

    # Per-iteration histories, appended to during training.
    self.critic_losses = []
    self.generator_losses = []
    self.wass_estimates = []
def __init__(self, params_dict):
    """Set up a classifier training run.

    Builds and compiles the binary classifier, loads the training data,
    and records where checkpoints are written.

    Args:
        params_dict (dict): Contains training/model parameters
    """
    self.save_dir = file_utils.make_save_directory("Classifier")

    # Binary classification setup: Adam + cross-entropy with accuracy metric.
    adam = tf.keras.optimizers.Adam(learning_rate=params_dict['lr'])
    self.model = build_model()
    self.model.compile(
        optimizer=adam,
        loss=tf.keras.losses.BinaryCrossentropy(),
        metrics=[tf.keras.metrics.BinaryAccuracy()],
    )

    self.batch_size = params_dict["batch_size"]
    self.data = data_utils.load_classifier_data(params_dict["data_path"])
    # NOTE(review): "cpkt" looks like a typo for the conventional "ckpt"
    # extension, but the path is runtime behavior — kept as-is; confirm
    # against wherever this checkpoint is read back.
    self.checkpoint_path = os.path.join(self.save_dir, "training/cp.cpkt")
    self.epochs = params_dict["epochs"]