# ---------------------------------------------------------------------------
# Training/evaluation setup for one cross-validation fold.
# NOTE(review): this span was a paste-merge of two overlapping chunks — the
# valset/valloader construction and the optimizer statement each appeared
# twice (the second valloader with a hard-coded num_workers=4, the first
# optimizer statement truncated). Deduplicated here, keeping the
# options-driven worker count consistently.
# ---------------------------------------------------------------------------

# Training set and loader for the selected fold.
dataset = Dataset_train(data_dir=data_dir, fold=options.fold,
                        input_size=input_size,
                        normalize_mean=IMG_MEAN, normalize_std=IMG_STD,
                        prob=options.prob)
trainloader = data.DataLoader(dataset, batch_size=batch_size, shuffle=True,
                              num_workers=options.workers)

# Quick validation set: all images are 321*321, so they batch directly.
valset = Dataset_val(data_dir=data_dir, fold=options.fold,
                     input_size=input_size,
                     normalize_mean=IMG_MEAN, normalize_std=IMG_STD)
valloader = data.DataLoader(valset, batch_size=options.bs_val, shuffle=False,
                            num_workers=options.workers, drop_last=False)

# Save a prediction/checkpoint once per pass over the training loader.
save_pred_every = len(trainloader)

# SGD with a single param group at 10x the base learning rate for the
# parameters returned by get_10x_lr_params — presumably newly initialized
# layers; confirm against the helper's definition.
optimizer = optim.SGD(
    [{'params': get_10x_lr_params(model), 'lr': 10 * learning_rate}],
    lr=learning_rate, momentum=momentum, weight_decay=weight_decay)

# Per-fold checkpoint directory (created if missing by check_dir).
checkpoint_dir = 'checkpoint/fo=%d/' % options.fold
check_dir(checkpoint_dir)

# Move the model to GPU and switch to evaluation mode.
model.cuda()
model.eval()

# Reset cached history masks before evaluation. 451 is presumably the number
# of validation samples for this split — TODO confirm against Dataset_val.
valset.history_mask_list = [None] * 451

# Accumulators for per-class intersection/union counts; 5 presumably matches
# the number of classes evaluated per fold — verify against the eval loop.
all_inter = np.zeros([5])
all_union = np.zeros([5])