Code example #1
# Imports assumed from the surrounding project layout (module names may differ)
from pathlib import Path

import torch
from sklearn.metrics import f1_score, roc_auc_score

import datahandler
from model import createDeepLabv3
from trainer import train_model


def main(data_directory, exp_directory, epochs, batch_size):
    # Create the deeplabv3 resnet101 model which is pretrained on a subset
    # of COCO train2017, on the 20 categories that are present in the Pascal VOC dataset.
    model = createDeepLabv3()
    model.train()
    data_directory = Path(data_directory)
    # Create the experiment directory if not present
    exp_directory = Path(exp_directory)
    if not exp_directory.exists():
        exp_directory.mkdir()

    # Specify the loss function
    criterion = torch.nn.MSELoss(reduction='mean')
    # Specify the optimizer with a lower learning rate
    optimizer = torch.optim.Adam(model.parameters(), lr=1e-4)

    # Specify the evaluation metrics
    metrics = {'f1_score': f1_score, 'auroc': roc_auc_score}

    # Create the dataloader
    dataloaders = datahandler.get_dataloader_single_folder(
        data_directory, batch_size=batch_size)
    _ = train_model(model,
                    criterion,
                    dataloaders,
                    optimizer,
                    bpath=exp_directory,
                    metrics=metrics,
                    num_epochs=epochs)

    # Save the trained model
    torch.save(model, exp_directory / 'weights.pt')
Code example #2
# Specify the loss function
#criterion = torch.nn.MSELoss(reduction='mean')
# Dice/F1 score - https://en.wikipedia.org/wiki/S%C3%B8rensen%E2%80%93Dice_coefficient
criterion = smp.utils.losses.DiceLoss()

# Specify the optimizer with a lower learning rate
optimizer = torch.optim.Adam(model.parameters(), lr=1e-4)

# Specify the evaluation metrics
# IoU/Jaccard score - https://en.wikipedia.org/wiki/Jaccard_index
metrics = {'f1_score': f1_score, 'auroc': roc_auc_score}
#metrics = [
#    smp.utils.metrics.IoU(threshold=0.5),
#]

# Create the dataloader
dataloaders = datahandler.get_dataloader_single_folder(data_dir,
                                                       batch_size=batchsize)
trained_model = train_model(model,
                            criterion,
                            dataloaders,
                            optimizer,
                            bpath=bpath,
                            metrics=metrics,
                            num_epochs=epochs)

# Save the trained model
torch.save({'model_state_dict': trained_model.state_dict()},
           os.path.join(bpath, 'state_dict.pt'))
torch.save(model, os.path.join(bpath, 'weights.pt'))
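For reference, a minimal sketch of how these two saved files could be reloaded later (assuming the same createDeepLabv3 factory used for training is available):

# Full model: loads architecture and weights together
# (newer PyTorch versions may need weights_only=False for full-model files)
model = torch.load(os.path.join(bpath, 'weights.pt'))

# State dict: rebuild the model first, then restore the weights
checkpoint = torch.load(os.path.join(bpath, 'state_dict.pt'))
model = createDeepLabv3()
model.load_state_dict(checkpoint['model_state_dict'])
model.eval()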
Code example #3
        (128, 128, 0):5,     # Sky
        (0, 128, 0):6,       # Vegetation
        (128, 0, 0):7,       # Window
        (0, 0, 0):8          # Void
    }

mask = cv2.imread(mask_name)
m = np.zeros((mask.shape[0],mask.shape[1]))
for x in range(mask.shape[0]):
    for y in range(mask.shape[1]):
        for k in mapping:
            if np.all(mask[x, y, :] == k):
                m[x, y] = mapping[k]

(very slow process)
'''
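# A vectorized alternative to the per-pixel loop sketched in the comment above
# (a sketch under assumptions: `mapping` maps colour tuples to class ids as in
# the commented-out code, and cv2.imread returns BGR, so the key order may need
# adjusting to match the stored masks):
mask = cv2.imread(mask_name)
m = np.zeros(mask.shape[:2], dtype=np.uint8)
for colour, class_id in mapping.items():
    m[np.all(mask == colour, axis=-1)] = class_id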

# Create the dataloader
dataloaders = datahandler.get_dataloader_single_folder(
    data_dir, batch_size=batchsize, class_id_list=class_id_list)
trained_model = train_model(model,
                            criterion,
                            dataloaders,
                            optimizer,
                            bpath=bpath,
                            metrics=metrics,
                            num_epochs=epochs)

# Save the trained model
# torch.save({'model_state_dict':trained_model.state_dict()},os.path.join(bpath,'weights'))
torch.save(model, os.path.join(bpath, 'weights.pt'))
Code example #4
# Create the deeplabv3 resnet101 model which is pretrained on a subset of
# COCO train2017, on the 20 categories that are present in the Pascal VOC dataset.
model = createDeepLabv3(outputConfig)
model.train()
# Create the experiment directory if not present
if not os.path.isdir(bpath):
    os.mkdir(bpath)


# Specify the loss function
criterion = torch.nn.MSELoss(reduction='mean')
# Specify the optimizer with a lower learning rate
optimizer = torch.optim.Adam(model.parameters(), lr=1e-4)

# Specify the evaluation metrics
metrics = {'f1_score': f1_score, 'auroc': roc_auc_score}


# Create the dataloader
dataloaders = datahandler.get_dataloader_single_folder(
    data_dir, imageFolder='images', maskFolder='masks', fraction=0.2, batch_size=batchsize)

trained_model = train_model(model, criterion, dataloaders,
                            optimizer, bpath=bpath, metrics=metrics, num_epochs=epochs)


# Save the trained model
# torch.save({'model_state_dict':trained_model.state_dict()},os.path.join(bpath,'weights'))
torch.save(model, os.path.join(bpath, 'weights.pt'))

Code example #5
model = createDeepLabv3()
model.train()
# Create the experiment directory if not present
if not os.path.isdir(bpath): os.mkdir(bpath)

# Specify the loss function
criterion = torch.nn.MSELoss(reduction='mean')
# Specify the optimizer with a lower learning rate
optimizer = torch.optim.Adam(model.parameters(), lr=1e-4)

# Specify the evaluation metrics
metrics = {'f1_score': f1_score, 'auroc': roc_auc_score}

# Create the dataloader
dataloaders = datahandler.get_dataloader_single_folder(data_dir,
                                                       imageFolder='Image',
                                                       maskFolder='Mask',
                                                       fraction=0.1,
                                                       batch_size=batchsize)
trained_model = train_model(model,
                            criterion,
                            dataloaders,
                            optimizer,
                            bpath=bpath,
                            metrics=metrics,
                            num_epochs=epochs)

# Save the trained model
# torch.save({'model_state_dict':trained_model.state_dict()},os.path.join(bpath,'weights'))
torch.save(model, os.path.join(bpath, 'weights.pt'))
Code example #6
# Create the experiment directory if not present
if not os.path.isdir(bpath):
    os.mkdir(bpath)

# Specify the loss function
criterion = torch.nn.CrossEntropyLoss()
# Specify the optimizer with a lower learning rate
optimizer = torch.optim.Adam(model.parameters(), lr=1e-4)

#scheduler = torch.optim.lr_scheduler.CosineAnnealingWarmRestarts(optimizer, T_0=2)
#scheduler = torch.optim.lr_scheduler.CyclicLR(optimizer, base_lr=1e-5, max_lr=1e-4)
#scheduler = None
# Specify the evaluation metrics
metrics = {'acc': accuracy_score}

# Create the dataloader
dataloaders = datahandler.get_dataloader_single_folder(data_dir,
                                                       batch_size=batchsize,
                                                       imageFolder='image',
                                                       maskFolder='mask')

testloaders = datahandler.get_dataloader_single_folder(val_dir,
                                                       batch_size=batchsize,
                                                       imageFolder='image',
                                                       maskFolder='mask')

trained_model = train_model(model,
                            criterion,
                            dataloaders,
                            testloaders,
                            optimizer,
                            bpath=bpath,
                            metrics=metrics,
                            num_epochs=epochs)
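The commented-out schedulers above would be stepped inside the training loop. A minimal sketch of that pattern, assuming a plain epoch loop in place of train_model, that the 'Train' split key matches the dataloader, and that batches are (image, mask) pairs suitable for CrossEntropyLoss:

scheduler = torch.optim.lr_scheduler.CosineAnnealingWarmRestarts(optimizer, T_0=2)
for epoch in range(epochs):
    for inputs, targets in dataloaders['Train']:  # 'Train' split key is an assumption
        optimizer.zero_grad()
        outputs = model(inputs)['out']            # torchvision DeepLabV3 returns a dict
        loss = criterion(outputs, targets)
        loss.backward()
        optimizer.step()
    scheduler.step()                              # step once per epoch for warm restarts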