Example #1
def main():
    args = get_args()

    experiment_name = 'yu4u'
    early_stop_patience = config.EARLY_STOP_EPOCHS

    PARAMS = {
        'epoch_nr': args.nb_epochs,
        'batch_size': args.batch_size,
        'learning_rate': args.lr,
        'early_stop': early_stop_patience,
        'image_size': config.IMAGE_SIZE,
        'network': args.model_name
    }

    neptune.init(project_qualified_name='4ND4/sandbox')
    neptune_tb.integrate_with_keras()
    result = neptune.create_experiment(name=experiment_name, params=PARAMS)

    name = result.id

    print(name)

    image_path = args.pred_dir
    model_name = args.model_name
    batch_size = args.batch_size
    nb_epochs = args.nb_epochs
    lr = args.lr

    image_size = config.IMAGE_SIZE

    train_path = os.path.join(image_path, 'train')
    validation_path = os.path.join(image_path, 'validation')
    test_path = os.path.join(image_path, 'test')

    train_gen, val_gen, test_gen = getdata(train_path, validation_path,
                                           test_path)

    model = get_model(model_name=model_name,
                      image_size=image_size,
                      number_classes=nb_classes)

    # note: this initial lr of 0.1 is overridden each epoch by the LearningRateScheduler below
    sgd = SGD(lr=0.1, momentum=0.9, nesterov=True)

    model.compile(optimizer=sgd,
                  loss="categorical_crossentropy",
                  metrics=[age_mae])

    model.summary()

    output_dir = Path(__file__).resolve().parent.joinpath(args.output_dir)

    if not output_dir.exists():
        output_dir.mkdir(parents=True)

    # create the directory that ModelCheckpoint below actually writes into
    if not os.path.exists(os.path.join(output_dir, name)):
        os.makedirs(os.path.join(output_dir, name))

    callbacks = [
        EarlyStopping(monitor='val_age_mae',
                      mode='min',
                      verbose=1,
                      patience=early_stop_patience),
        LearningRateScheduler(schedule=Schedule(nb_epochs, initial_lr=lr)),
        ModelCheckpoint(os.path.join(output_dir, name,
                                     "weights.{epoch:03d}-{val_loss:.3f}-{val_age_mae:.3f}.hdf5"),
                        monitor="val_age_mae",
                        verbose=1,
                        save_best_only=True,
                        mode="min")
    ]

    hist = model.fit_generator(train_gen,
                               steps_per_epoch=train_gen.samples // batch_size,
                               validation_data=val_gen,
                               epochs=nb_epochs,
                               verbose=1,
                               callbacks=callbacks)

    np.savez(str(output_dir.joinpath("history_{}.npz".format(name))),
             history=hist.history)
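The examples on this page are excerpts, so several names are defined outside what is shown (command-line parsing, the project's config module, the model and data helpers, and the Neptune client). Below is a minimal sketch of the imports and module-level names the snippets appear to rely on, inferred purely from usage; the project-specific entries are assumptions, not part of the original listings.

# Standard and third-party imports implied by the calls in these examples (assumed, inferred from usage).
import os
from pathlib import Path

import numpy as np
from keras.optimizers import SGD
from keras.callbacks import EarlyStopping, LearningRateScheduler, ModelCheckpoint

import neptune                            # legacy neptune-client API: neptune.init / create_experiment
import neptune_tensorboard as neptune_tb  # assumed source of neptune_tb.integrate_with_keras()

# Project-level names used without definition in these excerpts (assumed to live in the surrounding repo):
#   config                               - settings (IMAGE_SIZE, EARLY_STOP_EPOCHS, MAXIMUM_EPOCHS, batch_size, LOG_RESULTS)
#   get_args                             - argument parser returning nb_epochs, batch_size, lr, model_name, pred_dir, output_dir
#   getdata, FaceGenerator, ValGenerator - data loading / generator helpers
#   get_model                            - builds the Keras model for a given architecture and input size
#   age_mae                              - custom age mean-absolute-error metric
#   Schedule                             - callable learning-rate schedule passed to LearningRateScheduler
#   nb_classes, image_directory, monitor, log_results,
#   EPOCHS, VECTOR_SIZE, FACE_DETECTION, LOG_NEPTUNE - module-level constants and flags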
Example #2
def main():
    nb_epochs = config.MAXIMUM_EPOCHS

    # winning hyper-parameters from the tuning run, replacing the config defaults
    # (previously: lr = 0.1, momentum = 0.9, batch_size = config.batch_size,
    #  image_size = config.IMAGE_SIZE)
    batch_size = 96
    lr = 0.0008828491766861475
    momentum = 0.878361507747756
    image_size = 224

    model_name = 'ResNet50'

    output_dir = 'checkpoints'

    experiment_name = 'DeepUAge2.0'
    early_stop_patience = config.EARLY_STOP_EPOCHS

    train_path = os.path.join(image_directory, 'train')
    validation_path = os.path.join(image_directory, 'validation')
    # test_path = os.path.join(image_directory, 'test')

    PARAMS = {
        'epoch_nr': nb_epochs,
        'batch_size': batch_size,
        'learning_rate': lr,
        'momentum': momentum,
        'early_stop': early_stop_patience,
        'image_size': image_size,
        'network': model_name,
        'monitor': monitor,
        'dataset_path': image_directory
    }

    if log_results:

        neptune.init(project_qualified_name='4ND4/sandbox')
        neptune_tb.integrate_with_keras()
        result = neptune.create_experiment(name=experiment_name, params=PARAMS)
        name = result.id
    else:
        name = 'debug'

    print(name)

    train_gen, val_gen = getdata(train_path, validation_path)

    model = get_model(model_name=model_name,
                      image_size=image_size,
                      number_classes=nb_classes)

    sgd = SGD(lr=lr, momentum=momentum, nesterov=True)

    model.compile(optimizer=sgd,
                  loss="categorical_crossentropy",
                  metrics=[age_mae])

    model.summary()

    output_dir = Path(__file__).resolve().parent.joinpath(output_dir)

    if not output_dir.exists():
        output_dir.mkdir(parents=True)

    # create the directory that ModelCheckpoint below actually writes into
    if not os.path.exists(os.path.join(output_dir, name)):
        os.makedirs(os.path.join(output_dir, name))

    callbacks = [
        EarlyStopping(monitor=monitor,
                      mode='min',
                      verbose=1,
                      patience=early_stop_patience),
        ModelCheckpoint(os.path.join(output_dir, name,
                                     "weights.{epoch:03d}-{val_loss:.3f}-{val_age_mae:.3f}.hdf5"),
                        monitor=monitor,
                        verbose=1,
                        save_best_only=True,
                        mode="min")
    ]

    hist = model.fit_generator(generator=train_gen,
                               epochs=nb_epochs,
                               validation_data=val_gen,
                               verbose=1,
                               callbacks=callbacks)

    np.savez(str(output_dir.joinpath("history_{}.npz".format(name))),
             history=hist.history)
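Because hist.history is a plain dict, np.savez stores it as a zero-dimensional object array, so reading the file back later needs allow_pickle=True plus .item(). A small sketch, assuming the 'debug' run name and the checkpoints output directory from this example:

# Sketch: reload the training history written by np.savez(..., history=hist.history).
import numpy as np

data = np.load("checkpoints/history_debug.npz", allow_pickle=True)
history = data["history"].item()    # unwrap the 0-d object array back into the Keras history dict

print(sorted(history.keys()))       # e.g. ['age_mae', 'loss', 'val_age_mae', 'val_loss']
print(history["val_age_mae"][-1])   # metric value from the last completed epoch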
Example #3
PARAMS = {
    'epoch_nr': EPOCHS,
    'batch_size': 64,
    'learning_rate': 0.006,
    'input_shape': (512, 32, 3),
    'early_stop': 20
}

# start experiment

name = 'resnet50-experiment'

if LOG_NEPTUNE:
    neptune.init(project_qualified_name='4ND4/sandbox')
    neptune_tb.integrate_with_keras()
    result = neptune.create_experiment(name=name, params=PARAMS)

    name = result.id


def getdata():
    _data = np.load('output/DeepUAge-faces-embeddings_{}_{}.npz'.format(
        VECTOR_SIZE, FACE_DETECTION))
    embeddings_val_npz_path = 'output/DeepUAge-val-faces-embeddings-{}-{}.npz'.format(
        VECTOR_SIZE, FACE_DETECTION)

    trainX, trainy, valX, valy = (_data['arr_0'], _data['arr_1'],
                                  _data['arr_2'], _data['arr_3'])
    # print('Dataset: train=%d, test=%d' % (trainX.shape[0], valX.shape[0]))
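The arr_0 … arr_3 keys indicate the embeddings archive was written with positional arguments to np.savez, which auto-names arrays that way. A hypothetical sketch of producing and reading back such a file; the path, shapes, and label range are illustrative only, not from the original project:

import numpy as np

# Positional np.savez names the arrays arr_0, arr_1, ... inside the archive.
trainX = np.random.rand(100, 512).astype("float32")   # illustrative embedding vectors
trainy = np.random.randint(0, 18, size=100)           # illustrative age labels
valX = np.random.rand(20, 512).astype("float32")
valy = np.random.randint(0, 18, size=20)
np.savez("faces-embeddings-demo.npz", trainX, trainy, valX, valy)

_data = np.load("faces-embeddings-demo.npz")
trainX, trainy, valX, valy = (_data["arr_0"], _data["arr_1"],
                              _data["arr_2"], _data["arr_3"])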
Example #4
def main():
    nb_epochs = config.MAXIMUM_EPOCHS
    batch_size = config.batch_size
    lr = 0.1
    momentum = 0.9
    model_name = 'ResNet50'
    image_size = config.IMAGE_SIZE
    output_dir = 'checkpoints'

    experiment_name = 'yu4u'
    early_stop_patience = config.EARLY_STOP_EPOCHS

    PARAMS = {
        'epoch_nr': nb_epochs,
        'batch_size': batch_size,
        'learning_rate': lr,
        # 'input_shape': (512, 32, 3),
        'early_stop': early_stop_patience,
        'image_size': config.IMAGE_SIZE,
        'network': model_name
    }

    if config.LOG_RESULTS:
        neptune.init(project_qualified_name='4ND4/sandbox')
        neptune_tb.integrate_with_keras()
        result = neptune.create_experiment(name=experiment_name, params=PARAMS)

        name = result.id

    else:
        name = 'no_log'

    train_gen = FaceGenerator(image_directory,
                              batch_size=batch_size,
                              image_size=image_size,
                              number_classes=nb_classes)
    val_gen = ValGenerator(image_directory,
                           batch_size=batch_size,
                           image_size=image_size,
                           number_classes=nb_classes)

    model = get_model(model_name=model_name,
                      image_size=image_size,
                      number_classes=nb_classes)

    sgd = SGD(lr=lr, momentum=momentum, nesterov=True)

    model.compile(optimizer=sgd,
                  loss="categorical_crossentropy",
                  metrics=[age_mae])

    model.summary()

    output_dir = Path(__file__).resolve().parent.joinpath(output_dir)

    if not output_dir.exists():
        output_dir.mkdir(parents=True)

    # create the directory that ModelCheckpoint below actually writes into
    if not os.path.exists(os.path.join(output_dir, name)):
        os.makedirs(os.path.join(output_dir, name))

    callbacks = [
        EarlyStopping(monitor='val_age_mae',
                      mode='min',
                      verbose=1,
                      patience=early_stop_patience),
        LearningRateScheduler(schedule=Schedule(nb_epochs, initial_lr=lr)),
        ModelCheckpoint(os.path.join(output_dir, name,
                                     "weights.{epoch:03d}-{val_loss:.3f}-{val_age_mae:.3f}.hdf5"),
                        monitor="val_age_mae",
                        verbose=1,
                        save_best_only=True,
                        mode="min")
    ]

    hist = model.fit_generator(generator=train_gen,
                               epochs=nb_epochs,
                               validation_data=val_gen,
                               verbose=1,
                               callbacks=callbacks)

    np.savez(str(output_dir.joinpath("history_{}.npz".format(name))),
             history=hist.history)
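Note that model.fit_generator, used throughout these examples, is deprecated in tf.keras from TensorFlow 2.1 onwards; model.fit accepts generators and Sequence objects directly. If the code is moved to a newer Keras, the training call would typically look like the sketch below, keeping the same generators and callbacks as above:

# Sketch: equivalent training call on newer tf.keras, where fit() handles generators directly.
hist = model.fit(train_gen,
                 validation_data=val_gen,
                 epochs=nb_epochs,
                 verbose=1,
                 callbacks=callbacks)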