Example #1
def run():
    images_path = '/data/buildings/test'
    input_size = (320, 320)
    batch_size = 2600
    weights = 'weights_train/weights.unet-experiment-{}.h5'

    generator, _ = create_generator(images_path,
                                    input_size,
                                    batch_size,
                                    1,  # nb_classes
                                    rescale_masks=False,
                                    binary=True)

    images, masks = next(generator)  # draw a single batch of 2600 image/mask pairs

    for i, options in enumerate(experiments):
        print('Running prediction for experiment {}'.format(str(i)))
        dropout = options['dropout']

        model = build_unet(input_size, nb_classes=1, dropout=dropout)
        model.compile('sgd', 'mse')  # placeholder compile; optimizer/loss are unused at inference
        model.load_weights(weights.format(i))

        probs = model.predict(images, verbose=1)

        iou = batch_general_jaccard(masks, probs, binary=True)
        f1 = K.eval(f1_score(K.variable(masks), K.variable(probs)))
        print('mean IOU for {}: {}'.format(i, np.mean(iou)))
        print('F1 score for {}: {}'.format(i, f1))

        K.clear_session()
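
Example #1 leans on the project-local helper batch_general_jaccard to score each image in the batch. A minimal sketch of what such a per-image IoU computation typically looks like is below; the function name, the 0.5 threshold, and the empty-union convention are assumptions, not the repo's actual code.

import numpy as np

def batch_general_jaccard_sketch(masks, probs, threshold=0.5):
    """Per-image Jaccard/IoU for a batch of sigmoid outputs (sketch)."""
    scores = []
    for mask, prob in zip(masks, probs):
        truth = mask > threshold  # ground-truth foreground
        pred = prob > threshold   # binarized prediction
        union = np.logical_or(truth, pred).sum()
        intersection = np.logical_and(truth, pred).sum()
        # Assumed convention: an empty union (both masks blank) counts as perfect.
        scores.append(float(intersection) / union if union else 1.0)
    return scores
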
Example #2
def lrtest_unet(data_dir, logdir, weights_dir, weights_name, input_size,
                nb_classes, batch_size, initial_epoch, pre_trained_weight,
                learning_rate, augment):
    session_config()
    model = build_unet(input_size, nb_classes)

    binary = nb_classes == 1
    if binary:
        loss = binary_soft_jaccard_loss
    else:
        loss = soft_jaccard_loss

    model.compile(optimizer=Adam(lr=learning_rate),
                  loss=loss,
                  metrics=['acc', binary_jaccard_distance_rounded])

    train_generator, num_samples = create_generator(
        os.path.join(data_dir, 'train'),
        input_size,
        batch_size,
        nb_classes,
        rescale_masks=False,
        binary=binary,
        augment=augment)

    steps_per_epoch = num_samples // batch_size
    if augment:
        steps_per_epoch = steps_per_epoch * 4

    clr = CyclicLR(base_lr=0,
                   max_lr=1e-1,
                   step_size=5 * steps_per_epoch,
                   mode='triangular')
    model.fit_generator(generator=train_generator,
                        steps_per_epoch=steps_per_epoch,
                        epochs=5,
                        verbose=True,
                        workers=8,
                        callbacks=[clr],
                        initial_epoch=initial_epoch)

    h = clr.history
    lr = h['lr']
    acc = h['acc']
    print(lr)
    print()
    print(acc)
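
Example #2 is a learning-rate range test: with base_lr=0 and a large max_lr, CyclicLR sweeps the learning rate upward over 5 * steps_per_epoch batches while recording accuracy, and plotting lr against acc reveals the band in which training makes progress. A standalone sketch of the 'triangular' policy the callback implements (a reimplementation for illustration, not the callback's source):

import numpy as np

def triangular_lr(iteration, base_lr=0.0, max_lr=1e-1, step_size=2000):
    """Triangular cyclic schedule: lr climbs linearly from base_lr to
    max_lr over step_size iterations, then descends symmetrically."""
    cycle = np.floor(1 + iteration / (2.0 * step_size))
    x = np.abs(iteration / float(step_size) - 2 * cycle + 1)
    return base_lr + (max_lr - base_lr) * max(0.0, 1.0 - x)
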
Example #3
def train_unet(data_dir, logdir, weights_dir, weights_name, input_size,
               nb_classes, batch_size, initial_epoch, pre_trained_weight,
               learning_rate, augment):
    session_config()
    model = build_unet(input_size, nb_classes)
    model.summary()
    binary = nb_classes == 1
    if binary:
        loss = binary_soft_jaccard_loss
    else:
        loss = soft_jaccard_loss

    model.compile(optimizer=Adam(lr=learning_rate),
                  loss=loss,
                  metrics=['acc', binary_jaccard_distance_rounded])

    train_generator, num_samples = create_generator(
        os.path.join(data_dir, 'train'),
        input_size,
        batch_size,
        nb_classes,
        rescale_masks=True,
        binary=binary,
        augment=augment,
        mean=np.array([[[0.01279744, 0.01279744, 0.01279744]]]),
        std=np.array([[[0.11312577, 0.11312577, 0.11312577]]]))

    val_generator, val_samples = create_generator(
        os.path.join(data_dir, 'val'),
        input_size,
        batch_size,
        nb_classes,
        rescale_masks=True,
        binary=binary,
        augment=augment,
        mean=np.array([[[0.01279744, 0.01279744, 0.01279744]]]),
        std=np.array([[[0.11312577, 0.11312577, 0.11312577]]]))

    if pre_trained_weight:
        print('Loading weights: {}'.format(pre_trained_weight))
        model.load_weights(pre_trained_weight)
    steps_per_epoch = num_samples // batch_size

    if augment:
        steps_per_epoch = steps_per_epoch * 4

    cb = [ValidationCallback(val_samples // batch_size, val_generator)
          ] + callbacks(logdir,
                        filename=weights_name,
                        weightsdir=weights_dir,
                        monitor_val='mIOU',
                        base_lr=0.0002,
                        max_lr=0.002,
                        steps_per_epoch=steps_per_epoch)
    model.fit_generator(generator=train_generator,
                        steps_per_epoch=steps_per_epoch,
                        epochs=10000,
                        verbose=True,
                        workers=8,
                        callbacks=cb,
                        initial_epoch=initial_epoch)
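
train_unet compiles the model with the project-local binary_soft_jaccard_loss (or its multiclass counterpart). A common formulation of a soft, differentiable Jaccard loss is sketched below under the assumption of channels-last masks; the smoothing constant and the mean reduction are conventions, not necessarily what the repo uses.

from keras import backend as K

def binary_soft_jaccard_loss_sketch(y_true, y_pred, smooth=1.0):
    """Differentiable IoU loss: probabilities stand in for hard
    intersection/union counts, so gradients flow through y_pred."""
    axes = tuple(range(1, K.ndim(y_pred)))  # reduce all but the batch axis
    intersection = K.sum(y_true * y_pred, axis=axes)
    union = K.sum(y_true, axis=axes) + K.sum(y_pred, axis=axes) - intersection
    return 1.0 - K.mean((intersection + smooth) / (union + smooth))
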
def run():
    np.random.seed(2)
    tf.set_random_seed(2)
    data_dir = '/data/{}/'
    weights_dir = 'weights_train'

    for i, run in enumerate(runs):
        base_lr = run['base_lr']
        max_lr = run['max_lr']
        input_size = (run['input_size'], run['input_size'])
        weights_name = run['name']
        logs_dir = 'logs/{}'.format(run['name'])
        batch_size = run['batch_size']

        print("Running for config {}".format(run))

        for j, dataset in enumerate(run['datasets']):

            binary = dataset != 'multiclass'
            nb_classes = 1 if binary else 5

            print('Running training for {}'.format(dataset))

            train_generator, num_samples = create_generator(
                os.path.join(data_dir.format(dataset), 'train'),
                input_size,
                batch_size,
                nb_classes=nb_classes,
                rescale_masks=run['rescale_masks'],
                binary=binary,
                augment=False,
                mean=np.array([[[0.36654497, 0.35386439, 0.30782658]]]),
                std=np.array([[[0.19212837, 0.19031791, 0.18903286]]]))

            val_generator, val_samples = create_generator(
                os.path.join(data_dir.format(dataset), 'val'),
                input_size,
                batch_size,
                nb_classes=nb_classes,
                rescale_masks=run['rescale_masks'],
                binary=binary,
                augment=False,
                mean=np.array([[[0.36654497, 0.35386439, 0.30782658]]]),
                std=np.array([[[0.19212837, 0.19031791, 0.18903286]]]))

            if run['network'] == 'unet':
                model = build_unet(input_size, nb_classes=nb_classes)
            else:
                model = build_densenet(input_size, nb_classes, 67)

            model.summary()
            gpus = get_number_of_gpus()
            print('Found {} gpus'.format(gpus))
            if gpus > 1:
                model = ModelMGPU(model, gpus)

            if binary:
                loss = binary_soft_jaccard_loss
            else:
                loss = soft_jaccard_loss
            model.compile(optimizer=Adam(),
                          loss=loss,
                          metrics=['acc', binary_jaccard_distance_rounded])

            if run['pre_weights_name']:
                pre_weights_name = run['pre_weights_name'].format(dataset)
                weight = 'weights_train/weights.{}.h5'.format(pre_weights_name)
                print('Loading weights: {}'.format(weight))
                model.load_weights(weight)

            steps_per_epoch = num_samples // batch_size
            cyclic = 'triangular2'

            cb = [
                ValidationCallback(
                    val_samples // batch_size, val_generator, binary=binary)
            ] + callbacks(logs_dir.format(dataset),
                          filename=weights_name.format(dataset),
                          weightsdir=weights_dir,
                          monitor_val='mIOU',
                          base_lr=base_lr,
                          max_lr=max_lr,
                          steps_per_epoch=steps_per_epoch,
                          cyclic=cyclic)
            model.fit_generator(generator=train_generator,
                                steps_per_epoch=steps_per_epoch,
                                epochs=100,
                                verbose=True,
                                workers=8,
                                callbacks=cb)
            K.clear_session()
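
The multi-GPU branch wraps the model in ModelMGPU, a project-local class. A widely used pattern for such a wrapper is sketched here, assuming it delegates to keras.utils.multi_gpu_model while keeping weight saving and loading on the single-GPU template so that checkpoints written by the callbacks stay portable:

from keras import Model
from keras.utils import multi_gpu_model

class ModelMGPUSketch(Model):
    def __init__(self, ser_model, gpus):
        # Build the multi-GPU replica and adopt its internals.
        pmodel = multi_gpu_model(ser_model, gpus)
        self.__dict__.update(pmodel.__dict__)
        self._smodel = ser_model  # keep a handle on the template

    def __getattribute__(self, attrname):
        # Route save/load calls to the template so checkpoints hold
        # single-GPU weights; everything else hits the replica.
        if 'load' in attrname or 'save' in attrname:
            return getattr(self._smodel, attrname)
        return super(ModelMGPUSketch, self).__getattribute__(attrname)
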
def run():
    np.random.seed(2)
    tf.set_random_seed(2)

    base_lr = 0.0002
    max_lr = 0.002
    data_dir = '/data/buildings/'
    logs_dir = 'logs/{}'
    weights_dir = 'weights_train'
    weights_name = 'unet-experiment-{}'
    input_size = (320, 320)
    batch_size = 20
    start_from = 6
    binary = True
    session_config()

    train_generator, num_samples = create_generator(
        os.path.join(data_dir, 'train'),
        input_size,
        batch_size,
        nb_classes=1,
        rescale_masks=False,
        binary=binary,
        augment=False)
    val_generator, val_samples = create_generator(
        os.path.join(data_dir, 'val'),
        input_size,
        batch_size,
        nb_classes=1,
        rescale_masks=False,
        binary=binary,
        augment=False)

    for i, options in enumerate(experiments):
        if i < start_from:
            continue
        print('Running experiment {} with options: {}'.format(str(i), options))
        optimizer = get_optimizer(options['optimizer'], max_lr)
        loss = get_loss(options['loss'])
        dropout = options['dropout']

        model = build_unet(input_size, nb_classes=1, dropout=dropout)
        model.summary()
        model.compile(optimizer=optimizer,
                      loss=loss,
                      metrics=['acc', binary_jaccard_distance_rounded])

        steps_per_epoch = num_samples // batch_size

        lr_opt = options['lr']
        if lr_opt == 'annealing':
            cyclic = None
        else:
            cyclic = lr_opt

        print('Experiment {} using lr: {}'.format(str(i), cyclic))
        model.fit_generator(
            generator=train_generator,
            validation_data=val_generator,
            validation_steps=val_samples // batch_size,
            steps_per_epoch=steps_per_epoch,
            epochs=20,
            verbose=True,
            workers=8,
            callbacks=callbacks(
                logs_dir.format(str(i)),
                filename=weights_name.format(str(i)),
                weightsdir=weights_dir,
                monitor_val='val_binary_jaccard_distance_rounded',
                base_lr=base_lr,
                max_lr=max_lr,
                steps_per_epoch=steps_per_epoch,
                cyclic=cyclic))

        K.clear_session()
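
This experiment runner resolves the optimizer and loss from strings in each experiment's options via the project-local get_optimizer and get_loss. A hypothetical dispatch of that shape (the supported names and the momentum settings are assumptions about what the options dicts contain):

from keras.optimizers import Adam, SGD, RMSprop

def get_optimizer_sketch(name, lr):
    # Map an experiment option string to a configured Keras optimizer.
    return {
        'adam': Adam(lr=lr),
        'sgd': SGD(lr=lr, momentum=0.9, nesterov=True),
        'rmsprop': RMSprop(lr=lr),
    }[name]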