Example #1
def main():
    args = parser.parse_args()
    step = 0
    exp_name = f'{args.name}_{hp.max_lr}_{hp.cycle_length}'

    transforms = segtrans.JointCompose([segtrans.Resize(400),
                                        segtrans.RandomRotate(0, 90),
                                        segtrans.RandomCrop(256, 256),
                                        segtrans.ToTensor(),
                                        segtrans.Normalize(mean=hp.mean,
                                                           std=hp.std)])

    val_transforms = segtrans.JointCompose([segtrans.PadToFactor(),
                                            segtrans.ToTensor(),
                                            segtrans.Normalize(mean=hp.mean,
                                                               std=hp.std)])

    train_dataset = DSBDataset(f'{args.data}/train', transforms=transforms)
    val_dataset = DSBDataset(f'{args.data}/val', transforms=val_transforms)

    model = Unet()

    if args.checkpoint:
        checkpoint = torch.load(args.checkpoint)
        model.load_state_dict(checkpoint['state'])
        step = checkpoint['step']
        exp_name = checkpoint['exp_name']

    optimizer = Adam(model.parameters(), lr=hp.max_lr)

    if args.find_lr:
        scheduler = LRFinderScheduler(optimizer)
    else:
        scheduler = SGDRScheduler(optimizer, min_lr=hp.min_lr,
                                  max_lr=hp.max_lr, cycle_length=hp.cycle_length, current_step=step)

    model.cuda(device=args.device)
    train(model, optimizer, scheduler, train_dataset, val_dataset,
          n_epochs=args.epochs, batch_size=args.batch_size,
          exp_name=exp_name, device=args.device, step=step)
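# A hedged sketch (not part of the original file) of the module-level `parser` and
# `hp` objects that main() relies on; the argument names follow the attributes used
# above, while the defaults and hyperparameter values are purely illustrative.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--name', default='unet')
parser.add_argument('--data', default='data')
parser.add_argument('--checkpoint', default=None)
parser.add_argument('--find-lr', action='store_true')
parser.add_argument('--device', type=int, default=0)
parser.add_argument('--epochs', type=int, default=100)
parser.add_argument('--batch-size', type=int, default=16)


class hp:
    mean = [0.485, 0.456, 0.406]   # ImageNet statistics, as an example
    std = [0.229, 0.224, 0.225]
    min_lr = 1e-5
    max_lr = 1e-3
    cycle_length = 5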
Example #2

plt.imshow(isbi.train[0].reshape(512, 512), cmap='gray');


# In[7]:


plt.imshow(isbi.targets[0].reshape(512, 512), cmap='gray');


# In[8]:


unet = Unet()
unet.cuda();


# In[9]:


trainer = Trainer(unet)


# In[10]:


criterion = nn.BCEWithLogitsLoss()


# In[11]:
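# A hedged guess at the truncated cells that follow: Example #5 below runs the same
# ISBI pipeline with this Trainer, so the remaining steps are presumably along the
# lines of:
#
#     optimizer = optim.Adam(trainer.model.parameters(), lr=1e-3)
#     loss_history = trainer.fit_generator(isbi, criterion, optimizer, 25)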
Example #3
	valid_data = Ultrasound_Dataset(valid_df, transform=transforms_valid)
	valid_loader = DataLoader(valid_data, batch_size=4, shuffle=False)

	# Checking GPU Availability

	use_cuda = True
	if use_cuda and torch.cuda.is_available():
		print('yes')
	print(torch.cuda.is_available())


	# Model Initialization

	model = Unet(1, net_type='semi_inception', version='b', add_residual=True)
	
	if use_cuda and torch.cuda.is_available():
		model.cuda()
	
	criterion = CustomLoss(0.5, 1)

	optimizer = optim.Adam(model.parameters(), lr=5e-6)
	scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, 'min', patience=3)
	training_loss, valid_loss, model, saved_model = train_(
		model, optimizer, scheduler, criterion, train_loader, valid_loader, epochs=5)
	plot_learning_curve(training_loss,valid_loss)

	# save model for further use

	torch.save(model.state_dict(), '../Mymodel')
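	# A hedged sketch (not in the original script): reloading the weights saved above
	# for inference; the Unet arguments mirror the construction earlier in this example.
	reloaded = Unet(1, net_type='semi_inception', version='b', add_residual=True)
	reloaded.load_state_dict(torch.load('../Mymodel'))
	reloaded.eval()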

Example #4

lr = 0.0005
weight_decay = 5e-5
lr_schedule = 0.985


def adjust_lr(optimizer, current_lr, schedule):
    current_lr = current_lr * schedule
    for param_group in optimizer.param_groups:
        param_group['lr'] = current_lr
    return current_lr
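# A hedged usage sketch (not in the original): with lr_schedule = 0.985 the intended
# pattern is presumably an exponential decay applied once per epoch, e.g.
#
#     current_lr = lr
#     for epoch in range(num_epochs):
#         ...  # train for one epoch
#         current_lr = adjust_lr(optimizer, current_lr, lr_schedule)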


if __name__ == "__main__":
    args, unparsed = config.get_args()
    model = Unet(args)
    model = model.cuda()
    model.train()
    optimizer = torch.optim.Adam(model.parameters(),
                                 lr=lr,
                                 weight_decay=weight_decay)
    loss = MulticlassDiceLoss()

    train = get_file_list(brats_preprocessed_folder, train_ids_path)
    val = get_file_list(brats_preprocessed_folder, valid_ids_path)

    shapes = [brats_dataloader.load_patient(i)[0].shape[1:] for i in train]
    max_shape = np.max(shapes, 0)
    max_shape = list(np.max((max_shape, patch_size), 0))

    dataloader_train = brats_dataloader(train,
                                        batch_size,
Example #5
from torch import nn, optim
from torchvision import transforms

from datasets import ISBI2012Dataset
from trainers import Trainer

import warnings

warnings.filterwarnings("ignore")

transform = transforms.Compose([transforms.ToTensor()])

isbi = ISBI2012Dataset('./dataset/train-volume.tif',
                       './dataset/train-labels.tif',
                       transforms=transform)

# Train the U-Net
unet = Unet()
unet.cuda()

trainer = Trainer(unet)
criterion = nn.BCEWithLogitsLoss()
optimizer = optim.Adam(trainer.model.parameters(), lr=1e-3)
loss_history = trainer.fit_generator(isbi, criterion, optimizer, 25)

# Test phase: the test volume is passed as both image and target below,
# presumably because ground-truth labels are not provided for the test set.
isbi1 = ISBI2012Dataset('./dataset/test-volume.tif',
                        './dataset/test-volume.tif',
                        transforms=transform)

preds = trainer.predict_generator(isbi1)

# Each pixel is binarized to 0 or 1 with a threshold of 0.5
thresh = 0.5
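# A minimal sketch (not in the original) of applying the threshold above, assuming
# `preds` holds per-pixel foreground probabilities (e.g. sigmoid outputs) as a
# torch.Tensor; for a NumPy array the same comparison works via .astype(np.uint8).
binary_preds = (preds > thresh).float()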