logger = initialize_log(params) # load Dataset train_dataset = Dataset(params, 'train') val_dataset = Dataset(params, 'val') train_data = DataLoader(train_dataset, batch_size=params.batch_size, shuffle=True, num_workers=1) # num_workers 几个线程参与读数据 valid_data = DataLoader(val_dataset, batch_size=params.batch_size, shuffle=False, num_workers=1) # build the model ae = AutoEncoder(params.n_attr).cuda() lat_dis = LatentDiscriminator(params.n_attr).cuda() ptc_dis = PatchDiscriminator().cuda() clf_dis = Classifier(params.n_attr).cuda() # trainer / evaluator trainer = Trainer(ae, lat_dis, ptc_dis, clf_dis, train_data, params) evaluator = Evaluator(ae, lat_dis, ptc_dis, clf_dis, valid_data, params) for n_epoch in range(params.n_epochs): logger.info('Starting epoch %i...' % n_epoch) for n_iter in range(0, params.epoch_size, params.batch_size): # latent discriminator training
def setUp(self):
    """Prepare the shared test fixture: the sample dataframe, a compute
    device, and a fresh ``AutoEncoder`` sized to the dataframe width."""
    csv_path = os.path.join('tests', './data_for_testing.csv')
    self.df = pd.read_csv(csv_path)
    if torch.cuda.is_available():
        self.device = torch.device("cuda:0")
    else:
        self.device = torch.device("cpu")
    # one input feature per dataframe column
    n_features = self.df.shape[1]
    self.model = AutoEncoder(n_features)
assert not params.clf_dis_reload or os.path.isfile(params.clf_dis_reload) #print params.eval_clf #print "===" assert os.path.isfile(params.eval_clf) assert params.lambda_lat_dis == 0 or params.n_lat_dis > 0 assert params.lambda_ptc_dis == 0 or params.n_ptc_dis > 0 assert params.lambda_clf_dis == 0 or params.n_clf_dis > 0 # initialize experiment / load dataset logger = initialize_exp(params) data, attributes, data2, attributes2 = load_images(params) train_data = DataSampler(data[0], attributes[0], data2, attributes2, params) valid_data = DataSampler(data[1], attributes[1], None, None, params) # build the model ae = AutoEncoder(params).cuda() lat_dis = LatentDiscriminator(params).cuda() if params.n_lat_dis else None ptc_dis = PatchDiscriminator(params).cuda() if params.n_ptc_dis else None clf_dis = Classifier(params).cuda() if params.n_clf_dis else None eval_clf = torch.load(params.eval_clf).cuda().eval() # trainer / evaluator trainer = Trainer(ae, lat_dis, ptc_dis, clf_dis, train_data, params) evaluator = Evaluator(ae, lat_dis, ptc_dis, clf_dis, eval_clf, valid_data, params) for n_epoch in range(params.n_epochs): logger.info('Starting epoch %i...' % n_epoch) for n_iter in range(0, params.epoch_size, params.batch_size):
# Training entry-point script (chunk): builds the trainable (student)
# autoencoder and, when Jacobian distillation is enabled, a larger frozen
# teacher autoencoder, then wires up the trainer/evaluator.

# Reserve extra feature maps for the attribute channels appended to the input.
params.max_fm = params.max_fm + params.n_attr
# Kept for backward compatibility with any downstream code reading it.
params.max_fm_orig = params.max_fm
if not params.ae_teacher_reload:
    # Teacher weights are typically also used to initialize the student
    # (when possible), so default the teacher checkpoint to the student's.
    params.ae_teacher_reload = params.ae_reload

# initialize experiment / load dataset
DATAROOT = '/data/tmp'
logger = initialize_exp(params)
data, attributes = load_images(params)
train_data = DataSampler(data[0], attributes[0], params)
valid_data = DataSampler(data[1], attributes[1], params)

# build the trainable model
ae = AutoEncoder(params).cuda()

# build the Teacher model (if required)
if params.lambda_jacobian > 0:
    # BUGFIX: the original did `params2 = params`, which aliases rather than
    # copies — it mutated the shared `params.max_fm` and relied on restoring
    # it from `max_fm_orig` afterwards. Use a real (shallow) copy so the
    # teacher keeps its own configuration and `params` is never touched.
    import copy
    params2 = copy.copy(params)
    params2.max_fm = 512 + params.n_attr
    ae_teacher = AutoEncoder(params2).cuda()
else:
    ae_teacher = None

# trainer / evaluator
trainer = Trainer(ae, ae_teacher, train_data, params)
evaluator = Evaluator(ae, ae_teacher, valid_data, params)