# --- Stage 2: fine-tune only the classifier head of `model` ---
# Reconstructed from a whitespace-mangled (single-line) paste; statement
# order and tokens preserved, only line structure/indentation restored.
epoch_cl_val_acc = []   # per-epoch validation accuracy history
max_val_acc = 0         # best validation accuracy seen so far

# NOTE(review): the optimizer is built over ALL parameters even though only
# the classifier is trainable below; Adam simply skips params with no grad,
# but building it over the classifier params alone would be cleaner — confirm.
optimizer = optim.Adam(model.parameters(), lr=learning_rate_2)

for epoch in range(n_epochs_2):
    # TRAIN
    model.train()
    correct = 0
    train2_loss = 0
    train_num = 0

    # Freeze every parameter except the classifier head.
    # NOTE(review): this re-runs identically every epoch; it could be hoisted
    # above the epoch loop without changing behavior.
    trained_names = ['classifier.0.bias', 'classifier.0.weight']
    for name, param in model.named_parameters():
        if name in trained_names:
            param.requires_grad = True
        else:
            param.requires_grad = False

    for i, (XI, XB, y) in enumerate(train_loader):
        # Pick the input modality this model consumes.
        # NOTE(review): assumes model.header == 'CNN' means image input XI,
        # anything else means XB — confirm against model definition.
        if model.header == 'CNN':
            x = XI
        else:
            x = XB
        x, y = x.to(device), y.long().to(device)

        # Skip the trailing short batch; `break` (not `continue`) because the
        # last batch is the only one that can be undersized.
        if x.size()[0] != batch_size:
            # print("batch {} size {} < {}, skip".format(i, x.size()[0], batch_size))
            break
        train_num += x.size(0)
# --- Stage 2 (dual-branch): fine-tune the classifier heads of model_B and
# model_I in lockstep over the same loader ---
# Reconstructed from a whitespace-mangled (single-line) paste; statement
# order and tokens preserved, only line structure/indentation restored.
for epoch in range(n_epochs_2):
    # TRAIN
    model_B.train()
    model_I.train()
    correct_B = 0
    train2_loss_B = 0
    correct_I = 0
    train2_loss_I = 0
    train2_loss_tot = 0
    train_num = 0

    # Freeze every parameter except the classifier head, for both models.
    # NOTE(review): re-run identically every epoch; could be hoisted above
    # the epoch loop without changing behavior.
    trained_names = ['classifier.0.bias', 'classifier.0.weight']
    for name, param in model_B.named_parameters():
        if name in trained_names:
            param.requires_grad = True
        else:
            param.requires_grad = False
    for name, param in model_I.named_parameters():
        if name in trained_names:
            param.requires_grad = True
        else:
            param.requires_grad = False

    for i, (XI, XB, y) in enumerate(train_loader):
        XI, XB, y = XI.to(device), XB.to(device), y.long().to(device)
        # Skip the trailing short batch, mirroring the single-model loop.
        if XI.size()[0] != batch_size:
            # print("batch {} size {} < {}, skip".format(i, x.size()[0], batch_size))
            # NOTE(review): the source fragment is truncated here; `break` is
            # restored to match the identical guard in the single-model loop
            # above — confirm against the original file.
            break