# Assumed imports (not shown in the snippet); the module names follow the
# SSR-Net training scripts and may need adjusting to your project layout.
import argparse
import logging
import os

import numpy as np
import pandas as pd
from keras.callbacks import ModelCheckpoint
from keras.optimizers import Adam
from keras.utils import plot_model

import TYY_callbacks
from SSRNET_model import SSR_net
from TYY_generators import data_generator_reg
from TYY_utils import mk_dir, load_data_npz


def main():
    args = get_args()
    input_path1 = args.input1
    input_path2 = args.input2
    db_name = args.db
    batch_size = args.batch_size
    nb_epochs = args.nb_epochs
    netType1 = args.netType1
    netType2 = args.netType2

    logging.debug("Loading training data...")
    image1, age1, image_size = load_data_npz(input_path1)
    logging.debug("Loading testing data...")
    image2, age2, image_size = load_data_npz(input_path2)
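    # input1 supplies the training split and input2 the held-out validation
    # split; both .npz files are expected to share the same image size.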

    start_decay_epoch = [30, 60]  # epochs at which the DecayLearningRate callback reduces the LR

    optMethod = Adam()

    # SSR-Net hyperparameters: stage_num sets the number of bins in each of the
    # three regression stages; netType % 5 maps the CLI integers onto
    # lambda_local / lambda_d values in {0.0, 0.25, 0.5, 0.75, 1.0}.
    stage_num = [3, 3, 3]
    lambda_local = 0.25 * (netType1 % 5)
    lambda_d = 0.25 * (netType2 % 5)

    model = SSR_net(image_size, stage_num, lambda_local, lambda_d)()
    save_name = 'ssrnet_%d_%d_%d_%d_%s_%s' % (stage_num[0], stage_num[1],
                                              stage_num[2], image_size,
                                              lambda_local, lambda_d)
    model.compile(optimizer=optMethod, loss=["mae"], metrics={'pred_a': 'mae'})

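    # Fine-tuning on MegaAge warm-starts from weights pre-trained on wiki
    # (or imdb; see the commented-out path).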
    if db_name == "megaage":
        weight_file = "./pre-trained/wiki/ssrnet_3_3_3_64_1.0_1.0/ssrnet_3_3_3_64_1.0_1.0.h5"
        #weight_file = "./pre-trained/imdb/ssrnet_3_3_3_64_1.0_1.0/ssrnet_3_3_3_64_1.0_1.0.h5"
        model.load_weights(weight_file)

    logging.debug("Model summary...")
    model.count_params()
    model.summary()

    logging.debug("Saving model...")
    mk_dir(db_name + "_models")
    mk_dir(db_name + "_models/batch_size_%d/" % (batch_size))
    mk_dir(db_name + "_models/batch_size_%d/" % (batch_size) + save_name)
    mk_dir(db_name + "_checkpoints")
    mk_dir(db_name + "_checkpoints/batch_size_%d/" % (batch_size))
    plot_model(model,
               to_file=db_name + "_models/batch_size_%d/" % (batch_size) +
               save_name + "/" + save_name + ".png")

    with open(
            os.path.join(
                db_name + "_models/batch_size_%d/" % (batch_size) + save_name,
                save_name + '.json'), "w") as f:
        f.write(model.to_json())

    decaylearningrate = TYY_callbacks.DecayLearningRate(start_decay_epoch)

    callbacks = [
        ModelCheckpoint(db_name + "_checkpoints/batch_size_%d/" % (batch_size) +
                        "weights.{epoch:02d}-{val_loss:.2f}.hdf5",
                        monitor="val_loss",
                        verbose=1,
                        save_best_only=True,
                        mode="auto"),
        decaylearningrate,
    ]
    logging.debug("Running training...")

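    # Shuffle the training and validation splits independently.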
    indexes1 = np.arange(len(image1))
    indexes2 = np.arange(len(image2))
    np.random.shuffle(indexes1)
    np.random.shuffle(indexes2)
    x_train = image1[indexes1]
    x_test = image2[indexes2]
    y_train_a = age1[indexes1]
    y_test_a = age2[indexes2]
    train_num = len(image1)

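    # data_generator_reg yields (images, ages) batches indefinitely;
    # steps_per_epoch caps one epoch at a single pass over the training split.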
    hist = model.fit_generator(
        generator=data_generator_reg(X=x_train, Y=y_train_a, batch_size=batch_size),
        steps_per_epoch=train_num // batch_size,
        validation_data=(x_test, [y_test_a]),
        epochs=nb_epochs,
        verbose=1,
        callbacks=callbacks)

    logging.debug("Saving weights...")
    model.save_weights(os.path.join(
        db_name + "_models/batch_size_%d/" % (batch_size) + save_name,
        save_name + '.h5'),
                       overwrite=True)
    pd.DataFrame(hist.history).to_hdf(
        os.path.join(
            db_name + "_models/batch_size_%d/" % (batch_size) + save_name,
            'history_' + save_name + '.h5'), "history")
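

# The examples call get_args() without showing it. Below is a minimal argparse
# sketch of what it presumably returns; the argument names are inferred from
# the attribute accesses above, and the defaults are assumptions. (Example 2's
# variant reads --input and --validation_split instead of --input1/--input2.)
def get_args():
    parser = argparse.ArgumentParser(description="Train SSR-Net on .npz data")
    parser.add_argument("--input1", type=str, required=True, help="training .npz file")
    parser.add_argument("--input2", type=str, required=True, help="validation .npz file")
    parser.add_argument("--db", type=str, required=True, help="dataset name, e.g. megaage")
    parser.add_argument("--batch_size", type=int, default=50)
    parser.add_argument("--nb_epochs", type=int, default=90)
    parser.add_argument("--netType1", type=int, default=4)
    parser.add_argument("--netType2", type=int, default=4)
    return parser.parse_args()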


# Example 2: the same training pipeline, but fed a single .npz file from which
# a validation split is held out (imports as in Example 1).
def main():
    args = get_args()
    input_path = args.input
    db_name = args.db
    batch_size = args.batch_size
    nb_epochs = args.nb_epochs
    validation_split = args.validation_split
    netType1 = args.netType1
    netType2 = args.netType2

    logging.debug("Loading data...")
    image, gender, age, image_size = load_data_npz(input_path)
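    # gender is loaded but never used below; this script trains the age head only.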
    
    x_data = image
    y_data_a = age

    start_decay_epoch = [30,60]

    optMethod = Adam()

    stage_num = [3,3,3]
    lambda_local = 0.25*(netType1%5)
    lambda_d = 0.25*(netType2%5)

    model = SSR_net(image_size,stage_num, lambda_local, lambda_d)()
    save_name = 'ssrnet_%d_%d_%d_%d_%s_%s' % (stage_num[0],stage_num[1],stage_num[2], image_size, lambda_local, lambda_d)
    model.compile(optimizer=optMethod, loss=["mae"], metrics={'pred_a':'mae'})


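    # Warm-start chain: wiki fine-tunes from imdb weights and morph from wiki
    # weights whenever a previous run left its .h5 file behind.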
    if db_name == "wiki":
        weight_file = f'imdb_models/{save_name}/{save_name}.h5'
        if os.path.isfile(weight_file):  # skip silently when no previous weights exist
            print("previous weight loading...")
            model.load_weights(weight_file)
    elif db_name == "morph": 
        weight_file = f'wiki_models/{save_name}/{save_name}.h5'
        if os.path.isfile(weight_file):
            print("previous weight loading...")
            model.load_weights(weight_file) 

    
    logging.debug("Model summary...")
    model.count_params()
    model.summary()

    logging.debug("Saving model...")
    mk_dir(db_name+"_models")
    mk_dir(db_name+"_models/"+save_name)
    mk_dir(db_name+"_checkpoints")
    plot_model(model, to_file=db_name+"_age_models/"+save_name+"/"+save_name+".png")

    with open(os.path.join(db_name+"_age_models/"+save_name, save_name+'.json'), "w") as f:
        f.write(model.to_json())

    
    decaylearningrate = TYY_callbacks.DecayLearningRate(start_decay_epoch)

    callbacks = [
        ModelCheckpoint(db_name+"_checkpoints/weights.{epoch:02d}-{val_loss:.2f}.hdf5",
                        monitor="val_loss",
                        verbose=1,
                        save_best_only=True,
                        mode="auto"),
        decaylearningrate,
    ]

    logging.debug("Running training...")
    


    data_num = len(x_data)
    indexes = np.arange(data_num)
    np.random.shuffle(indexes)
    x_data = x_data[indexes]
    y_data_a = y_data_a[indexes]
    train_num = int(data_num * (1 - validation_split))
    
    x_train = x_data[:train_num]
    x_test = x_data[train_num:]
    y_train_a = y_data_a[:train_num]
    y_test_a = y_data_a[train_num:]


    hist = model.fit_generator(
        generator=data_generator_reg(X=x_train, Y=y_train_a, batch_size=batch_size),
        steps_per_epoch=train_num // batch_size,
        validation_data=(x_test, [y_test_a]),
        epochs=nb_epochs, verbose=1,
        callbacks=callbacks)

    logging.debug("Saving weights...")
    model.save_weights(os.path.join(db_name+"_models/"+save_name, save_name+'.h5'), overwrite=True)
    pd.DataFrame(hist.history).to_hdf(os.path.join(db_name+"_models/"+save_name, 'history_'+save_name+'.h5'), "history")
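

# Example 3: cross-dataset evaluation of pre-trained SSR-Net age and gender
# models with 10-fold splits. Assumed additional imports (not shown in the
# snippet): tensorflow as tf, keras.backend as K, and
# sklearn.model_selection.KFold, plus SSR_net_general from SSRNET_model;
# prepData and evaluate are project-specific helpers.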
def main():
    # dynamically allocate GPU memory instead of reserving it all up front
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    sess = tf.Session(config=config)
    K.tensorflow_backend.set_session(sess)
    print('[LOAD DATA]')
    images, ageLabel, genderLabel = prepData(64)  # project helper: 64x64 face crops with age/gender labels
    n_fold = 1
    
    img_size = 64
    stage_num = [3,3,3]
    lambda_local = 1
    lambda_d = 1
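
    # One age model (SSR_net) and one gender model (SSR_net_general) per
    # training corpus; all six nets share the same architecture hyperparameters.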
    imdb_model = SSR_net(img_size,stage_num, lambda_local, lambda_d)()
    imdb_model.compile(optimizer='adam', loss="mae", metrics=["mae"])
    imdb_model.load_weights("imdb_age_ssrnet_3_3_3_64_1.0_1.0.h5")
    
    imdb_model_gender = SSR_net_general(img_size,stage_num, lambda_local, lambda_d)()
    imdb_model_gender.compile(optimizer='adam', loss="mae", metrics=["binary_accuracy"])
    imdb_model_gender.load_weights("imdb_gender_ssrnet_3_3_3_64_1.0_1.0.h5")

    wiki_model = SSR_net(img_size,stage_num, lambda_local, lambda_d)()
    wiki_model.compile(optimizer='adam', loss="mae", metrics=["mae"])
    wiki_model.load_weights("wiki_age_ssrnet_3_3_3_64_1.0_1.0.h5")
    
    wiki_model_gender = SSR_net_general(img_size,stage_num, lambda_local, lambda_d)()
    wiki_model_gender.compile(optimizer='adam', loss="mae", metrics=["binary_accuracy"])
    wiki_model_gender.load_weights("wiki_gender_ssrnet_3_3_3_64_1.0_1.0.h5")

    morph_model = SSR_net(img_size,stage_num, lambda_local, lambda_d)()
    morph_model.compile(optimizer='adam', loss="mae", metrics=["mae"])
    morph_model.load_weights("morph_age_ssrnet_3_3_3_64_1.0_1.0.h5")
    
    morph_model_gender = SSR_net_general(img_size,stage_num, lambda_local, lambda_d)()
    morph_model_gender.compile(optimizer='adam', loss="mae", metrics=["binary_accuracy"])
    morph_model_gender.load_weights("morph_gender_ssrnet_3_3_3_64_1.0_1.0.h5")

   
    print('[K-FOLD] Started...')
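    # The models are already trained, so only each fold's test indices are used;
    # the 10 folds just measure score variance across data subsets.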
    kf = KFold(n_splits=10, shuffle=True, random_state=1)
    kf_split = kf.split(ageLabel)
    for _, test_idx in kf_split:
        print('[K-FOLD] Fold {}'.format(n_fold))      
        testImages = images[test_idx]
        testAge = ageLabel[test_idx]
        testGender = genderLabel[test_idx]

        scores = evaluate(imdb_model, testImages, testAge)
        print('imdb Age score:', scores)
        scores = evaluate(wiki_model, testImages, testAge)
        print('wiki Age score:', scores)
        scores = evaluate(morph_model, testImages, testAge)
        print('morph Age score:', scores)

        scores = evaluate(imdb_model_gender, testImages, testGender)
        print('imdb Gender score:', scores)
        scores = evaluate(wiki_model_gender, testImages, testGender)
        print('wiki Gender score:', scores)
        scores = evaluate(morph_model_gender, testImages, testGender)
        print('morph Gender score:', scores)

        n_fold += 1
        del testImages, testAge, testGender, scores