from datetime import datetime
from pathlib import Path
from typing import Optional, Tuple

import tensorflow as tf
from tensorflow.keras import Model
from tensorflow.keras.callbacks import ModelCheckpoint, TensorBoard

# setup, load_dataset, Discriminator, Generator, GAN, and loss_fn are
# defined elsewhere in this package.


def train_model(dataset: str, epochs: int, buffer_size: int, batch_size: int,
                latent_dim: int, data_save_path: Optional[str] = None,
                save_models: bool = True) -> Tuple[Model, Model]:
    setup()
    if not data_save_path:
        train_dataset = load_dataset(dataset=dataset, buffer_size=buffer_size,
                                     batch_size=batch_size)
    else:
        train_dataset = load_dataset(dataset=dataset,
                                     dataset_save_path=Path(data_save_path),
                                     buffer_size=buffer_size,
                                     batch_size=batch_size)

    generator_optimizer = tf.keras.optimizers.Adam(learning_rate=0.0002, beta_1=0.05)
    discriminator_optimizer = tf.keras.optimizers.Adam(learning_rate=0.0002, beta_1=0.05)

    discriminator = Discriminator()
    generator = Generator()
    gan = GAN(discriminator, generator, latent_dim)
    gan.compile(discriminator_optimizer, generator_optimizer, loss_fn)

    gan_home_path = Path(__file__).parent.parent.absolute()
    current_time = datetime.now().strftime("%Y%m%d%H%M%S")

    callbacks = []
    tensorboard_callback = TensorBoard(log_dir=str(gan_home_path / "logs" / current_time))
    callbacks.append(tensorboard_callback)
    if save_models:
        checkpoint_filepath = str(gan_home_path / "checkpoints"
                                  / f"{dataset}{epochs}_{current_time}" / "checkpoint")
        model_checkpoint_callback = ModelCheckpoint(filepath=checkpoint_filepath)
        callbacks.append(model_checkpoint_callback)

    _history = gan.fit(train_dataset, epochs=epochs, callbacks=callbacks)

    generator.summary()
    discriminator.summary()

    if save_models:
        generator.save(gan_home_path / "saved_models" / current_time
                       / f"generator_{dataset}{epochs}", save_format="tf")
        discriminator.save(gan_home_path / "saved_models" / current_time
                           / f"discriminator_{dataset}{epochs}", save_format="tf")
    return generator, discriminator
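# Usage sketch (assumed entry point, not part of the original module): the
# dataset key and hyperparameter values below are illustrative placeholders,
# not values taken from this repo.
if __name__ == "__main__":
    generator, discriminator = train_model(
        dataset="mnist",      # hypothetical dataset key
        epochs=50,
        buffer_size=60000,
        batch_size=256,
        latent_dim=128,
        save_models=True,
    )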
import argparse
import os

import torch
from torch.utils.data import DataLoader

# Generator, ImgEncoder, BirdsDataset, data_root, and model_root are
# defined elsewhere in this repo.

parser = argparse.ArgumentParser(description='Gans')
parser.add_argument('--model_name', type=str, default=None)
parser.add_argument('--load_from_epoch', type=int, default=0, help='load from epoch')
parser.add_argument('--idx', type=int, default=0, help='idx')
args = parser.parse_args()

model_name = args.model_name
idx = args.idx

# NNs
netG = Generator(tcode_dim=512, scode_dim=1024, emb_dim=128, hid_dim=128)
netEs = ImgEncoder(num_chan=1, out_dim=1024)
netEb = ImgEncoder(num_chan=3, out_dim=1024)
netG = netG.cuda()
netEs = netEs.cuda()
netEb = netEb.cuda()

data_name = model_name.split('_')[-1]
datadir = os.path.join(data_root, data_name)
model_folder = os.path.join(model_root, model_name)

print('> Loading test data ...')
dataset = BirdsDataset(datadir, mode='test')
batch_size = 20
dataloader = DataLoader(dataset, batch_size=batch_size, shuffle=True)
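# Sketch: restoring test-time weights, assuming the per-epoch checkpoint
# layout ('<Net>_epoch{n}.pth' inside model_folder) used elsewhere in this
# repo; --load_from_epoch selects the epoch. Treat this as an assumed
# convention, not confirmed code.
epoch = args.load_from_epoch
netG.load_state_dict(torch.load(os.path.join(model_folder, 'G_epoch{}.pth'.format(epoch))))
netEs.load_state_dict(torch.load(os.path.join(model_folder, 'Es_epoch{}.pth'.format(epoch))))
netEb.load_state_dict(torch.load(os.path.join(model_folder, 'Eb_epoch{}.pth'.format(epoch))))
netG.eval()
netEs.eval()
netEb.eval()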
                    help='Consistency module coefficient.')
parser.add_argument('--unet_checkpoint', type=str, default='',
                    help='Unet checkpoint')
parser.add_argument('--emb_dim', type=int, default=128, metavar='N',
                    help='Text and segmentation embedding dim.')
parser.add_argument('--n_plots', type=int, default=8,
                    help='Number of images to plot on tensorboard')
parser.add_argument('--scode_dim', type=int, default=1024,
                    help='Segmentation code dimension')
parser.add_argument('--manipulate', action='store_true', default=False,
                    help='Framework for image manipulation.')
args = parser.parse_args()

# NNs
netG = Generator(tcode_dim=512, scode_dim=args.scode_dim,
                 emb_dim=args.emb_dim, hid_dim=128)
netD = Discriminator()
netS = Unet()
netEs = ImgEncoder(num_chan=1, out_dim=args.scode_dim)
netEb = ImgEncoder(num_chan=3, out_dim=args.scode_dim)
netD = netD.cuda()
netG = netG.cuda()
netS = netS.cuda()
netEs = netEs.cuda()
netEb = netEb.cuda()

data_name = args.dataset
datadir = os.path.join(data_root, data_name)

print('> Loading training data ...')
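# Sketch: typical optimizer wiring for this training script. The learning
# rate and betas are assumptions (common GAN defaults), not values taken
# from this repo; adjust to the project's actual hyperparameters.
import torch.optim as optim

optimizerG = optim.Adam(netG.parameters(), lr=2e-4, betas=(0.5, 0.999))
optimizerD = optim.Adam(netD.parameters(), lr=2e-4, betas=(0.5, 0.999))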
png_file = file + '.png'
txt_file = file + '.txt'
z_file = file + '.pickle'

# cfgs
data_name = 'birds'
emb_dim = 128
scode_dim = 1024  # segmentation encoded dim

# folders
datadir = os.path.join(data_root, data_name)
model_name = '{}_{}'.format(model_name, data_name)
model_folder = os.path.join(model_root, model_name)

# NNs
netG = Generator(tcode_dim=512, scode_dim=scode_dim, emb_dim=emb_dim, hid_dim=128)
netEs = ImgEncoder(num_chan=1, out_dim=scode_dim)
netEb = ImgEncoder(num_chan=3, out_dim=scode_dim)

# Dataset
dataset = BirdsDataset(datadir, mode='test')
dataloader = DataLoader(dataset, batch_size=batch_size, shuffle=True)

# load models from checkpoint
G_weightspath = os.path.join(model_folder, 'G_epoch{}.pth'.format(epoch))
D_weightspath = os.path.join(model_folder, 'D_epoch{}.pth'.format(epoch))
Es_weightspath = os.path.join(model_folder, 'Es_epoch{}.pth'.format(epoch))
Eb_weightspath = os.path.join(model_folder, 'Eb_epoch{}.pth'.format(epoch))
netG.load_state_dict(torch.load(G_weightspath))
netEs.load_state_dict(torch.load(Es_weightspath))
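# Sketch: the restore steps one would expect to follow here, given that
# Eb_weightspath is computed above but never loaded in this fragment; an
# assumed completion, not the repo's confirmed code.
netEb.load_state_dict(torch.load(Eb_weightspath))
netG.eval()
netEs.eval()
netEb.eval()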