def main():
    """Train the general SSR-Net on gender labels from a preprocessed .npz dataset.

    Reads CLI args (input path, dataset name, batch size, epochs, validation
    split, net-type selectors), builds the model, optionally warm-starts from
    weights trained on the "previous" dataset, trains with a step-decayed
    learning rate, and saves architecture/weights/history under
    "<db>_gender_models/<save_name>/".
    """
    args = get_args()
    input_path = args.input
    db_name = args.db
    batch_size = args.batch_size
    nb_epochs = args.nb_epochs
    validation_split = args.validation_split
    netType1 = args.netType1
    netType2 = args.netType2

    logging.debug("Loading data...")
    image, gender, age, image_size = load_data_npz(input_path)

    x_data = image
    y_data_g = gender

    start_decay_epoch = [30, 60]
    optMethod = Adam()

    stage_num = [3, 3, 3]
    # netType selectors 1..4 map to lambda values {0.25, 0.5, 0.75, 1.0}.
    lambda_local = 0.25 * (netType1 % 5)
    lambda_d = 0.25 * (netType2 % 5)

    model = SSR_net_general(image_size, stage_num, lambda_local, lambda_d)()
    save_name = 'ssrnet_%d_%d_%d_%d_%s_%s' % (stage_num[0], stage_num[1],
                                              stage_num[2], image_size,
                                              lambda_local, lambda_d)
    model.compile(optimizer=optMethod, loss=["mae"], metrics={'pred': 'mae'})

    # Warm-start from the gender model trained on the preceding dataset
    # (imdb -> wiki -> morph). For db == "imdb" training starts from scratch.
    if db_name == "wiki":
        weight_file = "imdb_gender_models/" + save_name + "/" + save_name + ".h5"
        model.load_weights(weight_file)
    elif db_name == "morph":
        weight_file = "wiki_gender_models/" + save_name + "/" + save_name + ".h5"
        model.load_weights(weight_file)

    logging.debug("Model summary...")
    model.count_params()
    model.summary()

    logging.debug("Saving model...")
    mk_dir(db_name + "_gender_models")
    mk_dir(db_name + "_gender_models/" + save_name)
    mk_dir(db_name + "_checkpoints")
    plot_model(model,
               to_file=db_name + "_gender_models/" + save_name + "/" + save_name + ".png")

    # FIX: the JSON was previously written to "<db>_models/...", a directory
    # this function never creates; write it next to the weights in
    # "<db>_gender_models/" like every other artifact here.
    with open(os.path.join(db_name + "_gender_models/" + save_name,
                           save_name + '.json'), "w") as f:
        f.write(model.to_json())

    decaylearningrate = TYY_callbacks.DecayLearningRate(start_decay_epoch)

    callbacks = [
        ModelCheckpoint(db_name + "_checkpoints/weights.{epoch:02d}-{val_loss:.2f}.hdf5",
                        monitor="val_loss",
                        verbose=1,
                        save_best_only=True,
                        mode="auto"),
        decaylearningrate,
    ]

    logging.debug("Running training...")

    # Shuffle once, then split the tail off as the validation set.
    data_num = len(x_data)
    indexes = np.arange(data_num)
    np.random.shuffle(indexes)
    x_data = x_data[indexes]
    y_data_g = y_data_g[indexes]
    train_num = int(data_num * (1 - validation_split))

    x_train = x_data[:train_num]
    x_test = x_data[train_num:]
    y_train_g = y_data_g[:train_num]
    y_test_g = y_data_g[train_num:]

    hist = model.fit_generator(generator=data_generator_reg(X=x_train,
                                                            Y=y_train_g,
                                                            batch_size=batch_size),
                               steps_per_epoch=train_num // batch_size,
                               validation_data=(x_test, [y_test_g]),
                               epochs=nb_epochs,
                               verbose=1,
                               callbacks=callbacks)

    logging.debug("Saving weights...")
    model.save_weights(os.path.join(db_name + "_gender_models/" + save_name,
                                    save_name + '.h5'),
                       overwrite=True)
    pd.DataFrame(hist.history).to_hdf(
        os.path.join(db_name + "_gender_models/" + save_name,
                     'history_' + save_name + '.h5'),
        "history")
def _build_models(img_size, stage_num, lambda_local, lambda_d, db):
    """Build, compile, and load the pretrained (age, gender) SSR-Net pair for one dataset.

    `db` is the dataset prefix ("imdb", "wiki", or "morph") used to locate the
    weight files; returns a (age_model, gender_model) tuple.
    """
    age_model = SSR_net(img_size, stage_num, lambda_local, lambda_d)()
    age_model.compile(optimizer='adam', loss="mae", metrics=["mae"])
    age_model.load_weights("%s_age_ssrnet_3_3_3_64_1.0_1.0.h5" % db)

    gender_model = SSR_net_general(img_size, stage_num, lambda_local, lambda_d)()
    gender_model.compile(optimizer='adam', loss="mae", metrics=["binary_accuracy"])
    gender_model.load_weights("%s_gender_ssrnet_3_3_3_64_1.0_1.0.h5" % db)
    return age_model, gender_model


def main():
    """Cross-evaluate pretrained SSR-Net age and gender models with 10-fold splits.

    Loads the imdb/wiki/morph model pairs, then for each fold of the data
    prints the age scores for all three datasets' models followed by the
    gender scores for all three.
    """
    # Dynamically allocate GPU memory rather than grabbing it all up front.
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    sess = tf.Session(config=config)
    K.tensorflow_backend.set_session(sess)

    print('[LOAD DATA]')
    images, ageLabel, genderLabel = prepData(64)

    n_fold = 1
    img_size = 64
    stage_num = [3, 3, 3]
    lambda_local = 1
    lambda_d = 1

    dbs = ("imdb", "wiki", "morph")
    models = {db: _build_models(img_size, stage_num, lambda_local, lambda_d, db)
              for db in dbs}

    print('[K-FOLD] Started...')
    kf = KFold(n_splits=10, shuffle=True, random_state=1)
    for _, test_idx in kf.split(ageLabel):
        print('[K-FOLD] Fold {}'.format(n_fold))
        testImages = images[test_idx]
        testAge = ageLabel[test_idx]
        testGender = genderLabel[test_idx]

        # Preserve the original report order: all age scores, then all
        # gender scores, each in imdb/wiki/morph order.
        for db in dbs:
            scores = evaluate(models[db][0], testImages, testAge)
            print('%s Age score:' % db, scores)
        for db in dbs:
            scores = evaluate(models[db][1], testImages, testGender)
            print('%s Gender score:' % db, scores)

        n_fold += 1
        # Drop the fold's arrays before the next fold to keep peak memory low.
        del testImages, testAge, testGender, scores