model = Unet(in_channels=1,
             out_channels=3,
             paired_features=paired_features,
             pool_block=ResBlock,
             pool_kwargs={
                 'stride': 2
             },
             up_kwargs={
                 'attention': True
             },
             encode_block=ResBlockStack,
             encode_kwargs_fn=encode_kwargs_fn,
             decode_block=ResBlock).cuda()

patch_size = (160, 160, 80)
optimizer = optim.Adam(model.parameters(), lr=1e-4)
scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer,
                                                 factor=0.2,
                                                 patience=30)
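
# Sketch (not part of the original snippet): unlike most schedulers,
# ReduceLROnPlateau.step() takes the metric it monitors. Assuming a per-epoch
# validation loss `val_loss` is computed elsewhere, the scheduler would be
# driven like this at the end of each epoch:
def step_scheduler(val_loss):
    # lr is multiplied by factor=0.2 once val_loss has not improved
    # for patience=30 consecutive epochs
    scheduler.step(val_loss)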

tr_transform = Compose([
    GammaTransform((0.9, 1.1)),
    ContrastAugmentationTransform((0.9, 1.1)),
    BrightnessMultiplicativeTransform((0.9, 1.1)),
    MirrorTransform(axes=[0]),
    SpatialTransform_2(
        patch_size,
        (90, 90, 50),
        random_crop=True,
        do_elastic_deform=True,
        deformation_scale=(0, 0.05),
    ),
])
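
# Sketch (not part of the original snippet): these look like batchgenerators-style
# transforms, which operate on a dict of NumPy arrays (keys 'data' and 'seg',
# shape (batch, channel, x, y, z)). Applying the composed pipeline to a batch
# would then look like:
#
#     out = tr_transform(data=images, seg=segs)
#     aug_images, aug_segs = out['data'], out['seg']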
    # Time-stamped directories for checkpoints and TensorBoard logs
    weight_save_dir = os.path.join('models', 'state_dict', now.strftime('%m%d%H%M'))
    os.makedirs(weight_save_dir, exist_ok=True)
    writer = SummaryWriter(os.path.join('log', now.strftime('%m%d%H%M')))
    iterate = start_iter
    for epo in range(start_epo, epoch):
        print("\nEpoch : {}".format(epo))
        for i, batch in enumerate(tqdm(dataloader)):
            #if i > 10:
            #    break
            opt_dec.zero_grad()
            opt_en.zero_grad()
            img = batch['image'].to(device)
            mask = batch['mask'].to(device)
            pred, loss = model(img, mask)  # the model returns both the prediction and the training loss
            loss.backward()
            torch.nn.utils.clip_grad_norm_(model.parameters(), 100)  # cap the gradient norm at 100
            opt_dec.step()
            opt_en.step()
            writer.add_scalar('loss', float(loss), global_step=iterate)
            #if iterate % args.display_freq == 0:
            #    for masked in pred:
            #        writer.add_image('{}'.format(masked.size()[2]), masked, global_step=iterate)
            #    writer.add_image('GT', mask, iterate)
            #    writer.add_image('Image', img, iterate)

            # Save a checkpoint every 200 global steps (skip the first batch of an epoch)
            if iterate % 200 == 0 and i != 0:
                torch.save(
                    model.state_dict(),
                    os.path.join(weight_save_dir, '{}epo_{}step.ckpt'.format(epo, iterate))
                )
            iterate += 1  # advance the global step counter
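
# Sketch (not part of the original snippet): resuming from one of the checkpoints
# written above; `ckpt_path` stands for one of the files saved in weight_save_dir.
def load_checkpoint(model, ckpt_path):
    model.load_state_dict(torch.load(ckpt_path))
    return model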
Example #3
from torch.nn import init


def weights_init_xavier(m):
    classname = m.__class__.__name__
    if classname.find('Conv') != -1:
        init.xavier_normal_(m.weight.data, gain=1)
    elif classname.find('Linear') != -1:
        init.xavier_normal_(m.weight.data, gain=1)
    elif classname.find('BatchNorm2d') != -1:
        init.normal_(m.weight.data, 1.0, 0.02)  # standard BatchNorm init: weight ~ N(1.0, 0.02)
        init.constant_(m.bias.data, 0.0)



# batch_size = 24
net = Unet().cuda(0)
net.apply(weights_init_xavier)
net.train(True)


optimizerD = optim.Adam(net.parameters(), lr=0.005, betas=(0.5, 0.999))

# Binary cross-entropy loss (expects probabilities in [0, 1], e.g. after a sigmoid)
criterion = nn.BCELoss(reduction='mean').cuda(0)

# Path to the training images
data_path = './data/images/'
dst = ImageNet_Dataloader(data_path, is_transform=True)
print('length of the dataset', len(dst))
trainloader = data.DataLoader(dst, batch_size=24, shuffle=True)
step_index = 0

real_label = 1
fake_label = 0

# 500 epochs
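
# Sketch (not part of the original snippet): one possible shape for the epoch loop
# that the comment above refers to. It assumes the dataset yields (image, mask)
# pairs and that the network's output is already in [0, 1] (e.g. ends in a sigmoid)
# so it can be fed to BCELoss; real_label / fake_label hint at an adversarial
# variant, which is not shown here.
def train_one_epoch():
    global step_index
    for images, masks in trainloader:
        images, masks = images.cuda(0), masks.cuda(0).float()
        optimizerD.zero_grad()
        preds = net(images)
        loss = criterion(preds, masks)
        loss.backward()
        optimizerD.step()
        step_index += 1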