Example 1
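# Assumed module-level context (not shown in the scraped snippet): numpy,
# plus project modules `data`, `submission`, `models.sdf_model`, and a
# `train_sub` training helper whose real import locations are unknown.
import numpy as np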
def main():
    x1, y1 = data.get_training_data()
    x2, y2 = data.get_training_data2()


    x = np.concatenate((x1, x2), axis=0)
    y = np.concatenate((y1, y2), axis=0)

             
    from models.sdf_model import get_CNN_SDFt, get_flat_tanh_CNN_SDFt

    # # sdf flat tanh
    # train, val = data_g.get_train_val_iterators(aug=None)

    # model = get_flat_tanh_CNN_SDFt()
    # model_name = 'SDF_cnn_scaled_tanh_EXTDATA_noaug'

    # train_sub(model, model_name, x, y, epochs=100)
    # submission.create(model, model_name)


    # sdf normal tanh

    model = get_CNN_SDFt()
    model_name = 'SDF_cnn_tanh_EXTDATA_noaug'

    train_sub(model, model_name, x, y, epochs=100)
    submission.create(model, model_name)
Example 2
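# Assumed context for this snippet: tensorflow, sklearn's train_test_split,
# the project `data` module, and an `f1` metric (a sketch of which follows
# this example).
import tensorflow as tf
from sklearn.model_selection import train_test_split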
def calc_metrics(model):

    x1, y1 = data.get_training_data()
    _, x, _, y = train_test_split(x1, y1, test_size=0.3, random_state=42424242)

    y_pred = model.predict(x)

    acc = tf.keras.metrics.Accuracy()
    mIoU = tf.keras.metrics.MeanIoU(num_classes=2)

    print('acc', acc(y, y_pred), 'mIoU', mIoU(y, y_pred), 'f1', f1(y, y_pred))
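
# The `f1` metric used throughout these examples is a project helper whose
# definition is not part of the scraped code. A minimal sketch of such a
# metric for binary segmentation masks (an assumption, not the project's
# actual implementation):
def f1(y_true, y_pred, eps=1e-7):
    # binarize at 0.5, then compute F1 = 2PR / (P + R) over all pixels
    y_true = tf.cast(y_true > 0.5, tf.float32)
    y_pred = tf.cast(y_pred > 0.5, tf.float32)
    tp = tf.reduce_sum(y_true * y_pred)
    precision = tp / (tf.reduce_sum(y_pred) + eps)
    recall = tp / (tf.reduce_sum(y_true) + eps)
    return 2 * precision * recall / (precision + recall + eps)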
Example 3
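# Assumed context: standard-library and third-party imports inferred from
# usage below; save_image_label_list, augment_image, augment_label,
# save_image, and `data` are project helpers whose definitions are not shown.
import glob
import os
import random

import numpy as np
from PIL import Image
from sklearn.model_selection import train_test_split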
def main():
    path_train = ('input_full/train/image/', 'input_full/train/label/')
    path_val = ('input_full/val/image/', 'input_full/val/label/')
    
    os.makedirs(path_train[0], exist_ok=True)
    os.makedirs(path_train[1], exist_ok=True)
    os.makedirs(path_val[0], exist_ok=True)
    os.makedirs(path_val[1], exist_ok=True)
    
    original, original_label = data.get_training_data()
    original_train, original_val, original_train_label, original_val_label = train_test_split(
        original, original_label, test_size=0.3, random_state=42424242)
    
    save_image_label_list(original_train, original_train_label, 'original', path_train)
    save_image_label_list(original_val, original_val_label, 'original', path_val)
    
    random.seed(42424242)
    for i in range(original_train.shape[0]):
        augment_image(original_train[i], 'original_' + str(i), path_train[0])
    
    
    random.seed(42424242)
    for i in range(original_train_label.shape[0]):
        augment_label(original_train_label[i], 'original_' + str(i), path_train[1])
    
    
    chicago_image_files = sorted(glob.glob('chicago_data/split_quarter/images/*.png'))
    chicago_label_files = sorted(glob.glob('chicago_data/split_quarter/labels/*.png'))
    
    random.seed(42424242)
    for i in range(len(chicago_image_files)):
        image = Image.open(chicago_image_files[i])
        image = np.asarray(image) / 255.
        save_image(image, path_train[0] + 'chicago_' + str(i) + '.png')
        augment_image(image, 'chicago_' + str(i), path_train[0])
    
    random.seed(42424242)
    for i in range(len(chicago_image_files)):
        label = Image.open(chicago_label_files[i])
        label = np.asarray(label) / 255.
        save_image(label, path_train[1] + 'chicago_' + str(i) + '.png')
        augment_label(label, 'chicago_' + str(i), path_train[1])
Example 4
def main():
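    # Assumed context: numpy as np, tensorflow as tf, project modules data,
    # cnn, losses (and unet in the commented-out variants), plus the
    # train_sub helper and f1 metric.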
    x1, y1 = data.get_training_data()
    x2, y2 = data.get_training_data2()

    x = np.concatenate((x1, x2), axis=0)
    y = np.concatenate((y1, y2), axis=0)

    # x, y = data.augment_data(x, y)

    # # crossentropy
    # model = unet.get_model(None, None, 3, do_compile=False)
    # model.compile(optimizer='adam', loss='binary_crossentropy',
    #               metrics=['accuracy', tf.keras.metrics.MeanIoU(num_classes=2)])
    # model_name = 'u_net_cross_entropy_test'
    # train_sub(model, model_name, x, y, (x_test, y_test), epochs=100)

    # # focal
    # from losses import focal
    # loss = focal.focal_loss
    # model_name = 'u_net_focal_loss'
    # model = unet.get_model(None, None, 3, do_compile=False)
    # model.compile(optimizer='adam', loss=loss,
    #               metrics=['accuracy', tf.keras.metrics.MeanIoU(num_classes=2)])
    # train_sub(model, model_name, x, y, epochs=1)

    # dice
    from losses import dice
    loss = dice.dice_loss
    model_name = 'cnn_dice_EXTDATA_100e_nostop'
    model = cnn.get_model(None, None, 3, do_compile=False)
    model.compile(
        optimizer='adam',
        loss=loss,
        metrics=['accuracy',
                 tf.keras.metrics.MeanIoU(num_classes=2), f1])
    train_sub(model, model_name, x, y, epochs=100)
Example 5
def cross_val(model, model_name, load_training_data=True, x=None, y=None, augment_data_func=None, use_class_weight=False, epochs=100, batch_size=4, verbose=2):
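    # Assumed context: the project `data` module, sklearn's KFold, a fit()
    # training helper, get_min_index(), a PATIENCE constant, and `submission`.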
    print('\n\n\n5-Cross-Validation: ' + model_name)
    
    if load_training_data:
        x, y = data.get_training_data()
    
    kf = KFold(n_splits=5, shuffle=True, random_state=42424242)
    
    reset_weights = model.get_weights()  # for resetting the model weights

    histories = []
    index = 0
    best_losses = []
    for train_index, test_index in kf.split(x):
        print('\nSplit k=' + str(index))
        x_train, x_test = x[train_index], x[test_index]
        y_train, y_test = y[train_index], y[test_index]

        if augment_data_func is not None:
            x_train, y_train = augment_data_func(x_train, y_train)
    
        name = model_name + '_crossval-k' + str(index)
        crt_history = fit(model, x_train, y_train, epochs=epochs,
                          validation_data=(x_test, y_test),
                          checkpoint_suffix=name, batch_size=batch_size,
                          verbose=verbose, use_class_weight=use_class_weight)
        histories.append(crt_history)
        
        best_epoch = get_min_index(crt_history.history['loss'])
        best_loss = crt_history.history['loss'][best_epoch]
        best_losses.append(best_loss)
    
        index += 1
        model.set_weights(reset_weights)  # reset the model weights



    # EVALUATION

    print("\nCROSS-VALIDATION-RESULTS")
    print("model_name: " + model_name)
    #print("optimizer: " + str(model.optimizer))
    #print("loss: " + str(model.loss))
    print("epochs: 100, early_stopping_patience = " + str(PATIENCE))


    print('\nMETRICS')
    # get used metrics
    keys = histories[0].history.keys()
    # average of metrics over all data splits
    average_metrics = {}
    index = 0
    for h in histories:
        best_index = get_min_index(h.history['loss'])
        current_metrics = {}
        for k in keys:
            if k not in average_metrics:
                average_metrics[k] = h.history[k][best_index]
            else:
                average_metrics[k] += h.history[k][best_index]
            
            current_metrics[k] = h.history[k][best_index]
        print('k='+str(index), current_metrics)
        index += 1
            
    for k in average_metrics:
        average_metrics[k] /= len(histories)


    print("\nAVERAGE-METRICS")
    print(average_metrics)

    # reload first split model weights
    model.load_weights("checkpoints/ckp_" + model_name + "_crossval-k0.h5")
    # create submission from the reloaded fold-0 model
    submission.create(model, model_name + "_crossval-k0")
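
# Hypothetical usage of cross_val; the model constructor and name below are
# placeholders, not from the source:
#   model = unet.get_model(None, None, 3, do_compile=True)
#   cross_val(model, 'u_net_crossval', epochs=100)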
Example 6
def main():
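    # Assumed context: numpy as np, tensorflow as tf, sklearn's
    # train_test_split, project modules data, unet, losses, plus train_sub
    # and the f1/f1_binary metrics.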
    x1, y1 = data.get_training_data()
    x2, y2 = data.get_training_data2()

    print('x1, y1', x1.shape, y1.shape)
    x1_train, x_test, y1_train, y_test = train_test_split(
        x1, y1, test_size=0.3, random_state=42424242)

    print('x1_train, x2', x1_train.shape, x2.shape, x1_train.dtype, x2.dtype)
    x = np.concatenate((x1_train, x2), axis=0)
    print(y1_train.shape, y2.shape)
    y = np.concatenate((y1_train, y2), axis=0)

    # x, y = data.augment_data(x, y)

    # crossentropy
    model = unet.get_model(400, 400, 3, do_compile=False)
    model.compile(optimizer='adam',
                  loss='binary_crossentropy',
                  metrics=[
                      'accuracy',
                      tf.keras.metrics.MeanIoU(num_classes=2), f1, f1_binary
                  ])
    model_name = 'u_net2_crossentropy_EXTDATA_FS_1'

    train_sub(model, model_name, x, y, (x_test, y_test), epochs=100)

    # dice
    from losses import dice
    loss = dice.dice_loss

    model = unet.get_model(400, 400, 3, do_compile=False)
    model.compile(optimizer='adam',
                  loss=loss,
                  metrics=[
                      'accuracy',
                      tf.keras.metrics.MeanIoU(num_classes=2), f1, f1_binary
                  ])
    model_name = 'u_net2_dice_EXTDATA_FS_1'

    train_sub(model, model_name, x, y, (x_test, y_test), epochs=100)

    # u_net_focal
    from losses import focal
    loss = focal.focal_loss
    model = unet.get_model(400, 400, 3, do_compile=False)
    model.compile(optimizer='adam',
                  loss=loss,
                  metrics=[
                      'accuracy',
                      tf.keras.metrics.MeanIoU(num_classes=2), f1, f1_binary
                  ])
    model_name = 'u_net2_focal_EXTDATA_FS_1'

    train_sub(model, model_name, x, y, (x_test, y_test), epochs=100)

    # lovasz
    from losses import lovasz
    loss = lovasz.lovasz_loss
    model = unet.get_model(400, 400, 3, do_compile=False)
    model.compile(optimizer='adam',
                  loss=loss,
                  metrics=[
                      'accuracy',
                      tf.keras.metrics.MeanIoU(num_classes=2), f1, f1_binary
                  ])
    model_name = 'u_net2_lovasz_EXTDATA_FS_1'

    train_sub(model, model_name, x, y, (x_test, y_test), epochs=100)
Example 7
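# Assumed context: numpy as np, tensorflow as tf, the project `data` module,
# the fit() training helper, and the f1/f1_binary metrics from the earlier
# examples.
import numpy as np
import tensorflow as tf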
def main():
    from models import simple_patch_conv
    from submission.mask_to_submission import patch_to_label
    num_filters = 1024  # this is chosen somewhat arbitrarily, maybe try some different numbers
    batch_size = 8
    epochs = 200

    def transform_y(y):
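        """Collapse each pixel mask to one label per 16x16 patch via patch_to_label."""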
        labels = []
        num_patches = (int(y.shape[1] / 16), int(y.shape[2] / 16))
        for img_idx in range(y.shape[0]):
            img = np.empty(num_patches)
            for i in range(num_patches[0]):
                for j in range(num_patches[1]):
                    patch = y[img_idx, i * 16:(i + 1) * 16,
                              j * 16:(j + 1) * 16]
                    img[i, j] = patch_to_label(patch)
            labels.append(img)
        return np.asarray(labels)

    #################
    ### Testing Area
    ###

    from visualize.show_img import show_image_single, show_image, show_image_pred, blend_image

    x, y = data.get_training_data()
    x, y = data.augment_data(x, y)
    y = transform_y(y)
    x_test, x_test_names = data.get_test_data()

    from losses import dice, lovasz
    loss_test = 'binary_crossentropy'
    #loss_test = dice.dice_loss
    #loss_test = lovasz.lovasz_loss
    model = simple_patch_conv.get_model(None,
                                        None,
                                        3,
                                        num_filters=num_filters,
                                        do_compile=False,
                                        do_upsampling=False)
    model.compile(optimizer='adam',
                  loss=loss_test,
                  metrics=[
                      'accuracy',
                      tf.keras.metrics.MeanIoU(num_classes=2), f1, f1_binary
                  ])
    model_name = 'spc_test'
    fit(model,
        x,
        y,
        epochs=epochs,
        validation_split=0.1,
        validation_data=None,
        checkpoint_suffix=model_name,
        batch_size=batch_size,
        verbose=2)

    x_pred = np.squeeze(model.predict(x))
    x_pred_image = (x_pred > 0.5).astype(np.uint8)
    #for i in range(x_pred.shape[0]):
    for i in range(10):
        show_image_pred(x_pred[i], x_pred_image[i], y[i])

    x_test_pred = np.squeeze(model.predict(x_test))
    x_test_pred_image = (x_test_pred > 0.5).astype(np.uint8)
    #for i in range(x_test_pred.shape[0]):
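    # np.kron tiles each patch prediction into a 16x16 pixel block so the
    # patch-level mask can be blended over the full-resolution test image.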
    for i in range(10):
        show_image_pred(
            x_test_pred[i], x_test_pred_image[i],
            blend_image(
                x_test[i],
                np.kron(x_test_pred_image[i], np.ones((16, 16), dtype=int))))
Example 8
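# Assumed context: the project `data` module, sklearn's KFold, the fit() and
# get_min_index() helpers, a module-level `random_state` seed, and
# create_sub() for writing the submission.
from sklearn.model_selection import KFold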
def cross_val(model, model_name, epochs=100, batch_size=8, verbose=2):

    x, y = data.get_training_data()

    kf = KFold(n_splits=5, shuffle=True, random_state=random_state)

    histories = []
    index = 0
    best_losses = []
    current_name = ''
    for train_index, test_index in kf.split(x):
        x_train, x_test = x[train_index], x[test_index]
        y_train, y_test = y[train_index], y[test_index]

        print('augment data')
        x_train, y_train = data.augment_data_extended(x_train,
                                                      y_train,
                                                      num_random_rotations=3)

        current_name = model_name + '_crossval-k' + str(index)
        crt_history = fit(model,
                          x_train,
                          y_train,
                          epochs=epochs,
                          validation_data=(x_test, y_test),
                          checkpoint_suffix=current_name,
                          batch_size=batch_size,
                          verbose=verbose)
        histories.append(crt_history)

        best_epoch = get_min_index(crt_history.history['loss'])
        best_loss = crt_history.history['loss'][best_epoch]
        best_losses.append(best_loss)

        # reload this fold's best checkpoint (path assumed to follow the
        # "cps/ckp_" convention referenced at the end of this function)
        model.load_weights("cps/ckp_" + current_name + ".h5")

        index += 1

    # get used metrics
    keys = histories[0].history.keys()

    # average of metrics over all data splits
    average = {}
    for h in histories:
        best_index = get_min_index(h.history['loss'])
        for k in keys:
            if k not in average:
                average[k] = h.history[k][best_index]
            else:
                average[k] += h.history[k][best_index]
    for k in average:
        average[k] /= len(histories)

    print("\nCross-Validation")
    print("model_name: " + model_name)
    print("optimizer: " + str(model.optimizer))
    print("loss: " + str(model.loss))
    print("epoches: 100, early_stopping_patience = 9")
    print("cross_val_seed: " + str(random_state))
    print("AVERAGE-METRICS")
    print(average)

    # reload best model weights
    #best_model_index = get_min_index(best_losses)
    #model.load_weights("cps/ckp_" + model_name + '_crossval-k' + str(best_model_index) + ".h5")
    #print("best model: cps/ckp_" + model_name + '_crossval-k' + str(best_model_index) + ".h5")

    create_sub(model, model_name)