    options['training_samples'] = curr_train_patients
    options['test_samples'] = [curr_test_patient]

    experiment_folder = jp(path_res, experiment_name)
    create_folder(experiment_folder)

    path_results = jp(experiment_folder, "fold" + str(fold).zfill(2))
    create_folder(path_results)
    path_models = jp(path_results, "models")
    create_folder(path_models)
    path_segmentations = jp(path_results, "results")
    create_folder(path_segmentations)
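    # Resulting layout: <path_res>/<experiment_name>/foldNN/{models, results}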

    # Organize the data in a dictionary
    input_dictionary = create_training_validation_sets(options,
                                                       dataset_mode="l")

    input_dictionary = expand_dictionary(input_dictionary)

    transf = transforms.ToTensor()

    print('Training data: ')
    training_dataset = SlicesLoaderLoadAll(
        input_data=input_dictionary['input_train_data'],
        labels=input_dictionary['input_train_labels'],
        roi=input_dictionary['input_train_rois'],
        normalize=options['normalize'],
        norm_type=options['norm_type'])

    sample = training_dataset[100]  # quick sanity check: fetch one item from the dataset
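    # Sketch (not in the original source): the dataset would normally be wrapped
    # in a standard torch.utils.data.DataLoader for batched training, assuming
    # options['batch_size'] is defined (it is set to 10 in the config below).
    from torch.utils.data import DataLoader
    training_loader = DataLoader(training_dataset,
                                 batch_size=options['batch_size'],
                                 shuffle=True)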
options['normalize'] = True
options['norm_type'] = 'zero_one'
options['batch_size'] = 10
options['patience'] = 20  # Patience for early stopping
options['gpu_use'] = True
options['num_epochs'] = 200
options['optimizer'] = 'adam'
# Patch sampling type
# options['patch_sampling'] = 'mask'  # (mask, balanced, balanced+roi, or non-uniform)
# Loss
options['loss'] = 'dice'  # (dice, cross-entropy)
# Whether or not to re-sample each epoch for training patches
options['resample_each_epoch'] = False
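# options['loss'] = 'dice' selects a Dice-style objective. The project's own
# implementation is not shown in this excerpt; a minimal soft-Dice sketch for
# binary segmentation (an illustration, not the repository's code) would be:
import torch

def soft_dice_loss(logits, targets, eps=1e-6):
    """Return 1 - soft Dice coefficient, averaged over the batch."""
    probs = torch.sigmoid(logits)
    dims = tuple(range(1, probs.dim()))  # sum over all but the batch axis
    intersection = (probs * targets).sum(dim=dims)
    denom = probs.sum(dim=dims) + targets.sum(dim=dims)
    dice = (2.0 * intersection + eps) / (denom + eps)
    return 1.0 - dice.mean()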

# Organize the data in a dictionary
input_dictionary = create_training_validation_sets(options)
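# Judging from its use elsewhere in this file, the returned dictionary carries
# keys such as 'input_train_data', 'input_train_labels', 'input_train_rois',
# 'input_val_data' and 'input_val_rois'.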

# Show one subject with brain mask
# case_to_show = list(input_dictionary['input_val_data'].keys())[0]  # First case of the validation set
# shim_overlay(nib.load(input_dictionary['input_val_data'][case_to_show][0]).get_fdata(), nib.load(input_dictionary['input_val_rois'][case_to_show][0]).get_fdata(), 16, alpha=0.5)

# Create training, validation and test patches

transf = transforms.ToTensor()
"""

print('Training data: ')
training_dataset = SlicesLoader(input_data=input_dictionary['input_train_data'],
                                labels=input_dictionary['input_train_labels'],
                                roi=input_dictionary['input_train_rois'],
                                normalize=options['normalize'],
                                norm_type=options['norm_type'])
Example n. 3
    options['training_samples'] = curr_train_patients
    options['test_samples'] = [curr_test_patient]

    experiment_folder = jp(path_res, experiment_name)
    create_folder(experiment_folder)

    path_results = jp(experiment_folder, "fold" + str(fold).zfill(2))
    create_folder(path_results)
    path_models = jp(path_results, "models")
    create_folder(path_models)
    path_segmentations = jp(path_results, "results")
    create_folder(path_segmentations)

    # Organize the data in a dictionary
    input_dictionary = create_training_validation_sets(
        options, dataset_mode="l", specific_val=[validation_images[fold - 1]])
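    # specific_val pins one held-out validation image per fold
    # (fold is 1-indexed, hence validation_images[fold - 1]).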

    # Create training, validation and test patches

    rotation_angle = 5  # degrees; used only by the rotation transforms, which are disabled below
    transf = transforms.Compose([
        RandomFlipX(),
        RandomFlipY(),
        RandomFlipZ(),
        #RandomRotationXY(rotation_angle),
        #RandomRotationYZ(rotation_angle),
        #RandomRotationXZ(rotation_angle),
        ToTensor3DPatch()
    ])
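    # The flip transforms above are project-specific. As an illustration only
    # (assuming each transform is called on a 3-D numpy patch), a
    # RandomFlipX-style transform could be as simple as:
    import numpy as np

    class _RandomFlipXSketch:
        """Hypothetical stand-in for RandomFlipX: flip axis 0 with p=0.5."""
        def __call__(self, patch):
            if np.random.rand() < 0.5:
                patch = np.flip(patch, axis=0).copy()
            return patch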

    print('Training data: ')