def train_entangled_pong_network(beta=1.0):
    """Train a convolutional VAE with an entangled latent space on Atari Pong frames.

    Builds the model, compiles it with Adam, and fits it from image
    generators over the recorded train/test frame directories.

    Args:
        beta: KL-divergence weight for the VAE loss. Defaults to 1.0
            (entangled latent space), matching the original hard-coded value.
    """
    # inputs
    input_shape = (1, 84, 84)  # single-channel 84x84 frames
    filters = 32
    kernel_size = 6
    epochs = 10
    batch_size = 1

    # define filename
    name = 'cvae_atari_entangled_pong'

    # build hyperparameter dictionary (used to name the log directory)
    hp_dictionary = {
        'epochs': epochs,
        'batch_size': batch_size,
        'beta': beta,
        'filters': filters,
        'kernel_size': kernel_size,
        'loss': 'vae_loss',
        'optimizer': 'adam'
    }

    # define log directory
    # NOTE(review): `experiment` is a module-level name defined elsewhere in this file.
    log_dir = './summaries/' + experiment + '/' + utils.build_hyperparameter_string(
        name, hp_dictionary) + '/'

    # make VAE
    vae = PongEntangledConvolutionalLatentVAE(input_shape,
                                              log_dir,
                                              filters=filters,
                                              kernel_size=kernel_size,
                                              beta=beta)

    # compile VAE
    # BUG FIX: was lr=1e-1, which is 1000x the rate used by the sibling
    # trainer (train_shallow) and far above Adam's conventional default;
    # training would diverge. Use 1e-4 for consistency.
    from keras import optimizers
    optimizer = optimizers.Adam(lr=1e-4)
    vae.compile(optimizer=optimizer)

    # get dataset
    train_directory = './atari_agents/record/train/'
    test_directory = './atari_agents/record/test/'
    train_generator = utils.atari_generator(train_directory, batch_size=batch_size)
    test_generator = utils.atari_generator(test_directory, batch_size=batch_size)
    train_size = utils.count_images(train_directory)
    test_size = utils.count_images(test_directory)

    # print summaries
    vae.print_model_summaries()

    # fit VAE
    steps_per_epoch = int(train_size / batch_size)
    validation_steps = int(test_size / batch_size)
    vae.fit_generator(train_generator,
                      epochs=epochs,
                      steps_per_epoch=steps_per_epoch,
                      validation_data=test_generator,
                      validation_steps=validation_steps)
def train_shallow(beta):
    """Train the shallow dense MNIST VAE for a given KL weight `beta`.

    Constructs the model, compiles it with Adam (lr=1e-4), and fits it
    using generators built from the in-memory MNIST arrays.
    """
    # hyperparameters
    input_shape = (1, 28, 28)  # single-channel 28x28 MNIST digits
    epochs = 20
    batch_size = 1
    filters = 32
    kernel_size = 6
    pre_latent_size = 128
    latent_size = 32

    # run name used when composing the log-directory path
    name = 'shallow'

    # hyperparameter dictionary, folded into the summaries path below
    hp_dictionary = {
        'epochs': epochs,
        'batch_size': batch_size,
        'beta': beta,
        'filters': filters,
        'kernel_size': kernel_size,
        'loss': 'vae_loss',
        'optimizer': 'adam'
    }

    # log directory: ./summaries/<experiment>/<name_with_hyperparameters>/
    # NOTE(review): `experiment` is a module-level name defined elsewhere in this file.
    hp_string = utils.build_hyperparameter_string(name, hp_dictionary)
    log_dir = './summaries/' + experiment + '/' + hp_string + '/'

    # instantiate the VAE
    vae = ShallowDenseMNIST(input_shape,
                            log_dir,
                            filters=filters,
                            kernel_size=kernel_size,
                            pre_latent_size=pre_latent_size,
                            latent_size=latent_size,
                            beta=beta)

    # compile with Adam
    from keras import optimizers
    vae.compile(optimizer=optimizers.Adam(lr=1e-4))

    # load MNIST and wrap the splits in batch generators
    (X_train, _), (X_test, _) = utils.load_mnist()
    train_size = len(X_train)
    test_size = len(X_test)
    train_generator = utils.make_generator(X_train, batch_size=batch_size)
    test_generator = utils.make_generator(X_test, batch_size=batch_size)

    # show model architecture
    vae.print_model_summaries()

    # train: one full pass over each split per epoch
    vae.fit_generator(train_generator,
                      epochs=epochs,
                      steps_per_epoch=int(train_size / batch_size),
                      validation_data=test_generator,
                      validation_steps=int(test_size / batch_size))