Example #1
LABELS_DICT = {
    1: 'SOL',
    2: 'GM',
    3: 'GL',
    4: 'TA',
    5: 'ELD',
    6: 'PE',
}
MODEL_RESOLUTION = np.array([1.037037, 1.037037])
MODEL_SIZE = (432, 432)
MODEL_SIZE_SPLIT = (250, 250)

image_list, mask_list = pretrain.common_input_process_split(
    LABELS_DICT, MODEL_RESOLUTION, MODEL_SIZE, MODEL_SIZE_SPLIT, {
        'image_list': image_list,
        'resolution': MODEL_RESOLUTION
    }, seg_list)

ch = mask_list[0].shape[2]  # number of label channels per mask
aggregated_masks = []
mask_list_no_overlap = []
for masks in mask_list:
    # merge the per-label masks and drop pixels assigned to more than one label
    agg, new_masks = pretrain.calc_aggregated_masks_and_remove_overlap(masks)
    aggregated_masks.append(agg)
    mask_list_no_overlap.append(new_masks)

for slice_number in range(len(image_list)):
    img = image_list[slice_number]
    # The original example is truncated here; the closing arguments and
    # parentheses below are an assumed completion (add a batch axis, then predict).
    segmentation = model.predict(
        np.expand_dims(np.stack([img, np.zeros(MODEL_SIZE_SPLIT)], axis=-1),
                       axis=0))
Example #2
def thigh_incremental_mem(modelObj: DynamicDLModel,
                          trainingData: dict,
                          trainingOutputs,
                          bs=5,
                          minTrainImages=5):
    import dl.common.preprocess_train as pretrain
    from dl.common.DataGenerators import DataGeneratorMem
    import os
    from keras.callbacks import ModelCheckpoint
    from keras import optimizers
    import time
    # numpy may already be available in the calling scope; import it only if not
    try:
        np
    except NameError:
        import numpy as np

    from dl.labels.thigh import inverse_labels

    MODEL_RESOLUTION = np.array([1.037037, 1.037037])
    MODEL_SIZE = (432, 432)
    MODEL_SIZE_SPLIT = (250, 250)
    BAND = 49
    BATCH_SIZE = bs
    CHECKPOINT_PATH = os.path.join(".", "Weights_incremental_split", "thigh")
    MIN_TRAINING_IMAGES = minTrainImages

    os.makedirs(CHECKPOINT_PATH, exist_ok=True)

    t = time.time()
    print('Image preprocess')

    image_list, mask_list = pretrain.common_input_process_split(
        inverse_labels, MODEL_RESOLUTION, MODEL_SIZE, MODEL_SIZE_SPLIT,
        trainingData, trainingOutputs)

    print('Done. Elapsed', time.time() - t)
    nImages = len(image_list)

    if nImages < MIN_TRAINING_IMAGES:
        print("Not enough images for training")
        return

    print("image shape", image_list[0].shape)
    print("mask shape", mask_list[0].shape)

    print('Weight calculation')
    t = time.time()

    output_data_structure = pretrain.input_creation_mem(
        image_list, mask_list, BAND)

    print('Done. Elapsed', time.time() - t)

    card = len(image_list)
    steps = int(float(card) / BATCH_SIZE)  # number of whole batches per epoch

    print(f'Incremental learning for thigh with {nImages} images')
    t = time.time()

    netc = modelObj.model
    checkpoint_files = os.path.join(
        CHECKPOINT_PATH, "weights - {epoch:02d} - {loss:.2f}.hdf5")
    training_generator = DataGeneratorMem(output_data_structure,
                                          list_X=list(range(steps * BATCH_SIZE)),
                                          batch_size=BATCH_SIZE,
                                          dim=MODEL_SIZE_SPLIT)
    #check = ModelCheckpoint(filepath=checkpoint_files, monitor='loss', verbose=0, save_best_only=False,save_weights_only=True, mode='auto', period=10)
    check = ModelCheckpoint(
        filepath=checkpoint_files,
        monitor='loss',
        verbose=0,
        save_best_only=True,  # save_freq='epoch',
        save_weights_only=True,
        mode='auto')
    adamlr = optimizers.Adam(learning_rate=0.001,
                             beta_1=0.9,
                             beta_2=0.999,
                             epsilon=1e-08,
                             amsgrad=True)
    netc.compile(loss=pretrain.weighted_loss, optimizer=adamlr)
    #history = netc.fit_generator(generator=training_generator, steps_per_epoch=steps, epochs=5, callbacks=[check], verbose=1)
    history = netc.fit(x=training_generator,
                       steps_per_epoch=steps,
                       epochs=5,
                       callbacks=[check],
                       verbose=1)
    print('Done. Elapsed', time.time() - t)
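A minimal, hypothetical invocation sketch, assuming a DynamicDLModel has already been loaded and that the training data follows the same {'image_list', 'resolution'} layout used in Example #1; all variable names below are placeholders:

import numpy as np

training_data = {
    'image_list': image_list,                      # list of 2D slices (placeholder)
    'resolution': np.array([1.037037, 1.037037]),  # in-plane pixel spacing (assumed)
}

# model_obj: a DynamicDLModel wrapping the thigh network (assumed already loaded)
# seg_list: per-slice label masks passed as trainingOutputs (placeholder)
thigh_incremental_mem(model_obj, training_data, seg_list, bs=5, minTrainImages=5)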