# ------------------------------------------------------------------ data ---
# Build train/validation datasets and wrap them in shuffling loaders.
train_dataset = Dataset(params, 'train')
val_dataset = Dataset(params, 'val')
train_data = DataLoader(train_dataset, batch_size=params.batch_size,
                        shuffle=True, num_workers=1)   # num_workers: loader worker threads
valid_data = DataLoader(val_dataset, batch_size=params.batch_size,
                        shuffle=False, num_workers=1)  # keep validation order deterministic

# ----------------------------------------------------------------- model ---
# Autoencoder plus the three adversarial/auxiliary networks, all on GPU.
ae = AutoEncoder(params.n_attr).cuda()
lat_dis = LatentDiscriminator(params.n_attr).cuda()
ptc_dis = PatchDiscriminator().cuda()
clf_dis = Classifier(params.n_attr).cuda()

# --------------------------------------------------- trainer / evaluator ---
trainer = Trainer(ae, lat_dis, ptc_dis, clf_dis, train_data, params)
evaluator = Evaluator(ae, lat_dis, ptc_dis, clf_dis, valid_data, params)

# -------------------------------------------------------- training loop ---
# epoch_size is counted in samples, so the inner loop steps by batch_size.
for n_epoch in range(params.n_epochs):
    logger.info('Starting epoch %i...' % n_epoch)
    for n_iter in range(0, params.epoch_size, params.batch_size):
        # latent discriminator training
        trainer.lat_dis_step()
#print "===" assert os.path.isfile(params.eval_clf) assert params.lambda_lat_dis == 0 or params.n_lat_dis > 0 assert params.lambda_ptc_dis == 0 or params.n_ptc_dis > 0 assert params.lambda_clf_dis == 0 or params.n_clf_dis > 0 # initialize experiment / load dataset logger = initialize_exp(params) data, attributes, data2, attributes2 = load_images(params) train_data = DataSampler(data[0], attributes[0], data2, attributes2, params) valid_data = DataSampler(data[1], attributes[1], None, None, params) # build the model ae = AutoEncoder(params).cuda() lat_dis = LatentDiscriminator(params).cuda() if params.n_lat_dis else None ptc_dis = PatchDiscriminator(params).cuda() if params.n_ptc_dis else None clf_dis = Classifier(params).cuda() if params.n_clf_dis else None eval_clf = torch.load(params.eval_clf).cuda().eval() # trainer / evaluator trainer = Trainer(ae, lat_dis, ptc_dis, clf_dis, train_data, params) evaluator = Evaluator(ae, lat_dis, ptc_dis, clf_dis, eval_clf, valid_data, params) for n_epoch in range(params.n_epochs): logger.info('Starting epoch %i...' % n_epoch) for n_iter in range(0, params.epoch_size, params.batch_size): # latent discriminator training