import os

import torch

import model  # local module defining GlobalGenerator / NLayerDiscriminator

# `device` is assumed to be set at module level in the original script, e.g.
# device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')


def load_models(directory):
    # Inference: build the smaller generator and restore the final trained
    # weights if the checkpoint exists.
    generator = model.GlobalGenerator(n_downsampling=2, n_blocks=6)
    gen_name = os.path.join(directory, 'final_generator.pth')
    if os.path.isfile(gen_name):
        gen_dict = torch.load(gen_name)
        generator.load_state_dict(gen_dict)
    return generator.to(device)
def load_models(directory, batch_num):
    # Resume training: restore generator / discriminator weights from the
    # checkpoint pair saved at `batch_num`, if it exists.
    generator = model.GlobalGenerator()
    discriminator = model.NLayerDiscriminator(input_nc=3)
    gen_name = os.path.join(directory, '%05d_generator.pth' % batch_num)
    dis_name = os.path.join(directory, '%05d_discriminator.pth' % batch_num)
    if os.path.isfile(gen_name) and os.path.isfile(dis_name):
        gen_dict = torch.load(gen_name)
        dis_dict = torch.load(dis_name)
        generator.load_state_dict(gen_dict)
        discriminator.load_state_dict(dis_dict)
        print('Models loaded, resume training from batch %05d...' % batch_num)
    else:
        print('Cannot find saved models, start training from scratch...')
        batch_num = 0
    return generator, discriminator, batch_num
def load_models(directory, batch_num):
    # 20180924: smaller network (2 downsampling layers, 6 residual blocks).
    generator = model.GlobalGenerator(n_downsampling=2, n_blocks=6)
    discriminator = model.NLayerDiscriminator(input_nc=3, n_layers=3)  # 48 input
    gen_name = os.path.join(directory, '%05d_generator.pth' % batch_num)
    dis_name = os.path.join(directory, '%05d_discriminator.pth' % batch_num)
    if os.path.isfile(gen_name) and os.path.isfile(dis_name):
        gen_dict = torch.load(gen_name)
        dis_dict = torch.load(dis_name)
        generator.load_state_dict(gen_dict)
        discriminator.load_state_dict(dis_dict)
        print('Models loaded, resume training from batch %05d...' % batch_num)
    else:
        print('Cannot find saved models, start training from scratch...')
        batch_num = 0
    return generator, discriminator, batch_num
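
# Usage sketch (an assumption; the call site is not shown in this excerpt):
# a training script would typically call load_models() with its checkpoint
# directory and the batch index to resume from, then move both networks to the
# target device. The directory name 'checkpoints' and the batch index 20000
# below are purely illustrative.
if __name__ == '__main__':
    generator, discriminator, batch_num = load_models('checkpoints', 20000)
    generator = generator.to(device)
    discriminator = discriminator.to(device)
    print('Resuming from batch %05d' % batch_num)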