os.path.join(train_path, 'adni_train' + str(steps_per_epoch) + '_set.npy'))
    list_training = np.append(list_training, adni_list)
    steps_per_epoch = len(train_idx) + len(adni_list)
    training_generator = DataGenerator(**params).mixed_training(
        list_training,
        train_path,
        test_path,
        areas,
        training_size=steps_per_epoch)

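# Build the validation generator from the held-out validation indices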
validation_generator = DataGenerator(**params).generate_validation(
    val_idx, areas, test_path)

# Construct the model
print("Constructing model")
fm = unet(params['dim_x'], params['dim_y'], params['dim_z'], 1,
          nCommon_multiple, old_classes, alpha_relu)
fm.load_weights(path_pretrained_network)

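# Freeze everything before layer index `freeze_before_layer` so that only the
# deeper layers of the pre-trained network are updated during fine-tuning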
freeze_before_layer = 35

for l, layer in enumerate(fm.layers):
    if l < freeze_before_layer:  # layer.name not in frozen_layer_names:
        layer.trainable = False
    else:
        layer.trainable = True

# layers = [(layer, layer.name, layer.trainable) for layer in fm.layers]
# df = pd.DataFrame(layers, columns=['Layer Type', 'Layer Name', 'Layer Trainable'])
# print(df)

print('froze model before layer:', fm.layers[freeze_before_layer].name)
    DC_own, DC_classes_i = Metrics(Y_true_sel, Y_pred).DSC()

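    # Remap the original area labels in Y_true_sel to consecutive class indices,
    # with index 0 reserved for the background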
    Y_true_new = np.zeros(Y_true_sel.shape)
    areas_incl_BG = areas.copy()
    areas_incl_BG.insert(0, 0)
    for i, item in enumerate(areas_incl_BG):
        Y_true_new = np.where(Y_true_sel == item, i, Y_true_new)

    AnatomicalPlaneViewer(np.squeeze(X_i), Y_pred, Y_pred).max_of_slice('prediction', DC_own)
    plt.show()

    AnatomicalPlaneViewer(np.squeeze(X_i), Y_true_new, Y_pred).show_differences(DC_own, False)
    plt.show()

    print("Constructing model")
    unet_model = unet(params['dim_x'], params['dim_y'], params['dim_z'], 1,
                      nCommon_multiple, params['n_classes'], alpha_relu)
    unet_model.summary()
    print("Loading pre-trained model")
    unet_model.load_weights(os.path.join(pred_path, 'ModelDetails0.001/model_weight.h5'))

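    # Compile with Adam and the Dice loss so the model can be evaluated below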
    opt = Adam(lr=para_decay_auto['initial_lr'], beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.0)
    unet_model.compile(optimizer=opt, loss=[dice_loss], metrics=[dice_coef_prob])
    dimx = np.expand_dims(X_i, axis=0)
    dimy = np.expand_dims(Y_i, axis=0)
    results_i = unet_model.evaluate(dimx, dimy, batch_size=1)

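    # Predict the segmentation and collapse the per-class probabilities into a
    # label map by taking the argmax over the last (class) axis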
    seg_images = unet_model.predict(np.expand_dims(X_i, 0), batch_size=1)
    Y_pred_data = np.argmax(seg_images, axis=-1)
    print(results_i)

    APV_params = {'X': np.squeeze(X_i),
    def whole_brain_network(self, X, weight_path):
        print('Using whole brain network for overlapping voxels')
        ref_shape = X.shape
        n_classes = 9
        n_overlap = int(np.sum(self.overlap))

        # Image size parameters
        params = {
            'batch_size': 1,
            'dim_x': ref_shape[0],
            'dim_y': ref_shape[1],
            'dim_z': ref_shape[2],
            'n_classes': n_classes,
            'shuffle': True,
            'verbose': False
        }

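        # Optimiser / learning-rate schedule settings; only initial_lr is used here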
        para_decay_auto = {
            'initial_lr': initial_lr,
            'drop_percent': 0.5,
            'patience': 15,
            'threshold_epsilon': 0.0001,
            'momentum': 0.8,
            'nesterov': True
        }

        print("Constructing whole brain model")
        unet_model = unet(params['dim_x'], params['dim_y'], params['dim_z'], 1,
                          nCommon_multiple, n_classes, alpha_relu)
        print("Loading pre-trained model from", weight_path)
        unet_model.load_weights(weight_path)

        opt = Adam(lr=para_decay_auto['initial_lr'],
                   beta_1=0.9,
                   beta_2=0.999,
                   epsilon=1e-08,
                   decay=0.0)
        unet_model.compile(optimizer=opt,
                           loss=[dice_loss_all],
                           metrics=[dice_coef_prob])

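        # Segment the whole brain and take the argmax over the class axis; the
        # predicted lobe labels are used below to resolve the overlapping voxels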
        seg_images = unet_model.predict(np.expand_dims(X, 0), batch_size=1)
        Y_pred_data = np.squeeze(np.argmax(seg_images, axis=-1))
        location = np.where(self.overlap == 1)

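        # Merge the two partial segmentations, temporarily marking the
        # overlapping voxels as NaN until they are resolved in the loop below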
        remove_overlap1 = np.where(self.overlap == 1, 0, self.Y1)
        remove_overlap2 = np.where(self.overlap == 1, 0, self.Y2)
        merged_Y = remove_overlap1 + remove_overlap2
        merged_Y = np.where(self.overlap == 1, np.nan, merged_Y)

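        # Assign each overlapping voxel to whichever of the two candidate labels
        # falls inside the lobe predicted by the whole-brain network at that voxel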
        for n in range(n_overlap):
            x, y, z = location[0][n], location[1][n], location[2][n]
            label1 = self.Y1[x, y, z]
            label2 = self.Y2[x, y, z]
            label_whole_brain = Y_pred_data[x, y, z]
            areas_in_lobe = lobes_to_areas(label_whole_brain)
            if areas_in_lobe == 0:
                merged_Y[x, y, z] = 0  # belongs to background
            elif label1 in areas_in_lobe:
                merged_Y[x, y, z] = label1
            elif label2 in areas_in_lobe:
                merged_Y[x, y, z] = label2
            elif label1 not in areas_in_lobe and label2 not in areas_in_lobe:
                # the voxel belongs to a lobe that has not been processed yet;
                # it will hopefully be assigned when that lobe is assessed later
                merged_Y[x, y, z] = 0
            else:
                print('Unexpected label combination at voxel', (x, y, z))
        print('Finished labelling overlapping voxels')
        if np.isnan(merged_Y).any():
            print('Still NaN in merged Y')

        del unet_model  # drop the reference so the model can be garbage collected

        return merged_Y