# --- Model setup: segmentation U-Net plus shape/background image encoders ---
netS = Unet()
netEs = ImgEncoder(num_chan=1, out_dim=args.scode_dim)  # shape encoder (1-channel masks)
netEb = ImgEncoder(num_chan=3, out_dim=args.scode_dim)  # background encoder (RGB images)
netD = netD.cuda()
netG = netG.cuda()
netS = netS.cuda()
netEs = netEs.cuda()
netEb = netEb.cuda()

# --- Training data ---
data_name = args.dataset
datadir = os.path.join(data_root, data_name)
print('> Loading training data ...')
if args.dataset == 'birds':
    dataset = BirdsDataset(datadir, mode='train')
elif args.dataset == 'flowers':
    dataset = FlowersDataset(datadir, mode='train')
else:
    # Fail fast with a clear message; previously an unknown dataset name left
    # `dataset` undefined and crashed with a NameError on the next line.
    raise ValueError(
        "unknown dataset {!r}; expected 'birds' or 'flowers'".format(args.dataset))
dataloader = DataLoader(dataset, batch_size=args.batch_size, shuffle=True)

# --- Create model folder (race-free: exist_ok replaces exists()+makedirs()) ---
model_name = '{}_{}'.format(args.model_name, data_name)
model_folder = os.path.join(model_root, model_name)
os.makedirs(model_folder, exist_ok=True)
print('> Model folder: %s' % model_folder)

print('> Start training ...')
print('>> Run tensorboard --logdir models/')
train_gan(dataloader, model_folder, netG, netD, netS, netEs, netEb, args)
# --- Model setup: segmentation U-Net plus shape/background image encoders ---
netS = Unet()
netEs = ImgEncoder(num_chan=1, out_dim=args.scode_dim)  # shape encoder (1-channel masks)
netEb = ImgEncoder(num_chan=3, out_dim=args.scode_dim)  # background encoder (RGB images)
netD = netD.cuda()
netG = netG.cuda()
netS = netS.cuda()
netEs = netEs.cuda()
netEb = netEb.cuda()

# --- Training data ---
data_name = args.dataset
datadir = os.path.join(data_root, data_name)
print('> Loading training data ...')
if args.dataset == 'birds':
    # NOTE(review): only the birds dataset receives `batch=` here — presumably
    # intentional, but confirm FlowersDataset does not need it as well.
    dataset = BirdsDataset(datadir, mode='train', batch=args.batch_size)
elif args.dataset == 'flowers':
    dataset = FlowersDataset(datadir, mode='train')
else:
    # Fail fast with a clear message; previously an unknown dataset name left
    # `dataset` undefined and crashed with a NameError on the next line.
    raise ValueError(
        "unknown dataset {!r}; expected 'birds' or 'flowers'".format(args.dataset))
dataloader = DataLoader(dataset, batch_size=args.batch_size, shuffle=True)

# --- Create model folder (race-free: exist_ok replaces exists()+makedirs()) ---
model_name = '{}_{}'.format(args.model_name, data_name)
model_folder = os.path.join(model_root, model_name)
os.makedirs(model_folder, exist_ok=True)
print('> Model folder: %s' % model_folder)

print('> Start training ...')
print('>> Run tensorboard --logdir models/')
train_gan(dataloader, model_folder, netG, netD, netS, netEs, netEb, args)
# --- NNs: generator plus shape/background image encoders (evaluation mode) ---
netG = Generator(tcode_dim=512, scode_dim=1024, emb_dim=128, hid_dim=128)
netEs = ImgEncoder(num_chan=1, out_dim=1024)  # shape encoder (1-channel masks)
netEb = ImgEncoder(num_chan=3, out_dim=1024)  # background encoder (RGB images)
netG = netG.cuda()
netEs = netEs.cuda()
netEb = netEb.cuda()

# Dataset name is encoded as the suffix of the model name ('<model>_<data>').
data_name = model_name.split('_')[-1]
datadir = os.path.join(data_root, data_name)
model_folder = os.path.join(model_root, model_name)

print('> Loading test data ...')
dataset = BirdsDataset(datadir, mode='test')
batch_size = 20
dataloader = DataLoader(dataset, batch_size=batch_size, shuffle=True)

''' load model '''
# `assert` is stripped when Python runs with -O, silently skipping this check;
# raise explicitly so a missing epoch is always reported.
if args.load_from_epoch == '':
    raise ValueError('args.load_from_epoch is empty')
G_weightspath = os.path.join(
    model_folder, 'G_epoch{}.pth'.format(args.load_from_epoch))
Es_weightspath = os.path.join(
    model_folder, 'Es_epoch{}.pth'.format(args.load_from_epoch))
Eb_weightspath = os.path.join(
    model_folder, 'Eb_epoch{}.pth'.format(args.load_from_epoch))
print('reload weights from {}'.format(G_weightspath))
print('reload weights from {}'.format(Es_weightspath))
print('reload weights from {}'.format(Eb_weightspath))
netG.load_state_dict(torch.load(G_weightspath))
# --- CLI arguments for neural-distance (triplet ranking) training ---
parser.add_argument('--model_name', type=str, default='neural_dist')
parser.add_argument('--dataset', type=str, default=None,
                    help='which dataset to use [birds or flowers]')
# BUG FIX: without type=float, a value supplied on the command line arrives as
# a str ('0.3') while the default stays a float (0.2), breaking any arithmetic
# that uses the margin downstream.
parser.add_argument('--margin', type=float, default=0.2,
                    help='margin used in triplet loss')
args = parser.parse_args()
args.cuda = torch.cuda.is_available()

data_name = args.dataset
datadir = os.path.join(data_root, data_name)

# Visual-semantic ranking model and its image encoder, moved to GPU.
vs_model = ImgSenRanking(dim_image, sent_dim, hid_dim)
img_encoder = ImageEncoder()
vs_model = vs_model.cuda()
img_encoder = img_encoder.cuda()

print('> Loading test data ...')
dataset_train = BirdsDataset(datadir, mode='train')
dataset_test = BirdsDataset(datadir, mode='test')

model_name = '{}_{}'.format(args.model_name, data_name)
print('>> START training ')
train_nd(dataset_train, dataset_test, model_root, model_name,
         img_encoder, vs_model, args)