Example #1
# Assumed imports for this snippet; `data_split_norm`, `MyHyperModel`,
# `ClearTrainingOutput`, and `calc_nse_cc` are helpers defined elsewhere
# in the source project.
import os

import matplotlib.pyplot as plt
import pandas as pd
import tensorflow as tf
from kerastuner.tuners import RandomSearch


def hyper_tuner(month_path, log_path, time_step, train_data, valid_data,
                test_data, item, item_index, item_name):

    train_x, valid_x, test_x, train_y, valid_y, test_y = data_split_norm(
        train_data, valid_data, test_data, item_index, time_step)

    hyper_model = MyHyperModel(input_shape=train_x.shape[-2:])

    project_name = item_name + "_" + str(time_step)

    tuner = RandomSearch(hyper_model,
                         objective='val_loss',
                         max_trials=150,
                         executions_per_trial=5,
                         directory=log_path,
                         project_name=project_name)

    callbacks = [
        tf.keras.callbacks.EarlyStopping(monitor='val_loss',
                                         min_delta=1e-3,
                                         patience=15,
                                         restore_best_weights=True),
        tf.keras.callbacks.ReduceLROnPlateau(monitor='val_loss',
                                             factor=0.1,
                                             patience=5,
                                             verbose=0,
                                             mode='auto',
                                             min_delta=0.0001),
        ClearTrainingOutput()
    ]

    tuner.search_space_summary()

    tuner.search(train_x,
                 train_y,
                 epochs=200,
                 batch_size=256,
                 validation_data=(valid_x, valid_y),
                 callbacks=callbacks,
                 verbose=2)

    # Get the optimal hyperparameters
    best_hps = tuner.get_best_hyperparameters(num_trials=1)[0]

    model = tuner.hypermodel.build(best_hps)
    history = model.fit(train_x,
                        train_y,
                        epochs=200,
                        batch_size=256,
                        validation_data=(valid_x, valid_y),
                        callbacks=callbacks,
                        verbose=2)
    models_path = os.path.join(month_path, "models")
    os.makedirs(models_path, exist_ok=True)
    model_name = item + "_model.h5"
    model_path = os.path.join(models_path, model_name)
    model.save(model_path)
    # Release the tuner
    del tuner

    layers_path = os.path.join(month_path, item)
    os.makedirs(layers_path, exist_ok=True)
    plot_path_1 = os.path.join(layers_path, "loss_epoch.png")
    plot_path_2 = os.path.join(layers_path, "measure_predict.png")
    csv_path = os.path.join(layers_path, "measure_predict.csv")

    history = history.history

    plt.plot(history['loss'], linewidth=2, label='Train')
    plt.plot(history['val_loss'], linewidth=2, label='Validation')
    plt.legend(loc='upper right')
    plt.ylabel('Mean Relative Error [$' + item + '$]')
    plt.xlabel('Epoch')
    plt.savefig(plot_path_1, dpi=300, bbox_inches="tight")
    plt.close()

    test_predictions = model.predict(test_x)
    plt.figure(figsize=(20, 6))
    plt.plot(test_y, label='measure')
    plt.plot(test_predictions, label='predict')
    plt.legend(loc='upper right')
    if item == 'PH':
        plt.ylabel(item)
    else:
        plt.ylabel(item + '(mg/L)')
    plt.xlabel('Test set')
    plt.savefig(plot_path_2, dpi=300, bbox_inches="tight")
    plt.close()

    test_predictions = test_predictions.flatten()  # flatten to 1-D
    valid_predictions = model.predict(valid_x).flatten()

    train_fvu = history['loss'][-1]
    train_rmse = history['root_mean_squared_error'][-1]
    train_mre = history['mean_absolute_percentage_error'][-1]

    valid_fvu, valid_rmse, valid_mre = model.evaluate(valid_x,
                                                      valid_y,
                                                      verbose=0)
    valid_nse, valid_cc = calc_nse_cc(valid_y, valid_predictions)
    test_fvu, test_rmse, test_mre = model.evaluate(test_x, test_y, verbose=0)

    test_nse, test_cc = calc_nse_cc(test_y, test_predictions)

    measure_predict_data = pd.DataFrame()
    measure_predict_data['predict'] = test_predictions
    measure_predict_data['measure'] = test_y
    measure_predict_data.to_csv(csv_path, encoding='utf-8')
    # `best_hps.get` raises if a hyperparameter was never sampled
    # (e.g. the best model has fewer stacked layers).
    try:
        hidden_layer_1 = best_hps.get('units_0')
    except Exception:
        hidden_layer_1 = 0
    try:
        hidden_layer_2 = best_hps.get('units_1')
    except Exception:
        hidden_layer_2 = 0

    model_info = {
        'output': item,
        "time_step": time_step,
        'hidden_layer_1': hidden_layer_1,
        'hidden_layer_2': hidden_layer_2,
        'hidden_layer_3': best_hps.get('units_n'),
        'train_fvu': train_fvu,
        'train_rmse': train_rmse,
        'train_mre': train_mre,
        "valid_fvu": valid_fvu,
        'valid_nse': valid_nse,
        'valid_rmse': valid_rmse,
        'valid_mre': valid_mre,
        'valid_cc': valid_cc,
        "test_fvu": test_fvu,
        'test_nse': test_nse,
        'test_rmse': test_rmse,
        'test_mre': test_mre,
        'test_cc': test_cc
    }
    return model_info
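
The `MyHyperModel` class used above is defined elsewhere in the source project. A minimal sketch of what it might look like, assuming a stacked-LSTM regressor whose layer widths are registered under the `units_0`, `units_1`, and `units_n` names that `hyper_tuner` reads back (the layer types, value ranges, and loss are guesses, not the original author's code):

from kerastuner import HyperModel  # `keras_tuner` in newer releases
import tensorflow as tf


class MyHyperModel(HyperModel):
    """Hypothetical reconstruction; layer types and ranges are assumptions."""

    def __init__(self, input_shape):
        self.input_shape = input_shape  # (time_step, n_features)

    def build(self, hp):
        model = tf.keras.Sequential()
        model.add(tf.keras.layers.Input(shape=self.input_shape))
        # Optional stacked recurrent layers, registered as `units_0`, `units_1`.
        for i in range(hp.Int('num_layers', 0, 2)):
            model.add(tf.keras.layers.LSTM(
                hp.Int('units_' + str(i), 32, 256, step=32),
                return_sequences=True))
        # Final recurrent layer, registered as `units_n`.
        model.add(tf.keras.layers.LSTM(hp.Int('units_n', 32, 256, step=32)))
        model.add(tf.keras.layers.Dense(1))
        model.compile(
            optimizer=tf.keras.optimizers.Adam(
                hp.Choice('learning_rate', values=[1e-2, 1e-3, 1e-4])),
            loss='mse',  # stand-in; the original appears to track an FVU-style loss
            metrics=[tf.keras.metrics.RootMeanSquaredError(),
                     tf.keras.metrics.MeanAbsolutePercentageError()])
        return model

The metric order matters: `model.evaluate` in the snippet unpacks three values (loss, RMSE, MRE), which is why the sketch compiles with exactly those two metrics.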
Example #2
def hyper_tuner(log_path, time_step, train_data, valid_data, test_data, item,
                item_index, item_name):
    # Uses the same imports and helpers as Example #1, plus
    # `from tensorflow import keras`.
    keras.backend.clear_session()

    train_x, valid_x, test_x, train_y, valid_y, test_y = data_split_norm(
        train_data, valid_data, test_data, item_index, time_step)

    hyper_model = MyHyperModel(input_shape=train_x.shape[-2:])

    project_name = item_name + "_" + str(time_step)

    tuner = RandomSearch(hyper_model,
                         objective='val_loss',
                         max_trials=150,
                         executions_per_trial=10,
                         directory=log_path,
                         project_name=project_name)

    callbacks = [
        tf.keras.callbacks.EarlyStopping(monitor='val_loss',
                                         min_delta=1e-3,
                                         patience=15,
                                         restore_best_weights=True),
        tf.keras.callbacks.ReduceLROnPlateau(monitor='val_loss',
                                             factor=0.1,
                                             patience=5,
                                             verbose=0,
                                             mode='auto',
                                             min_delta=0.0001),
        ClearTrainingOutput()
    ]

    tuner.search_space_summary()

    tuner.search(train_x,
                 train_y,
                 epochs=200,
                 batch_size=256,
                 validation_data=(valid_x, valid_y),
                 callbacks=callbacks,
                 verbose=2)

    # Get the optimal hyperparameters
    best_hps = tuner.get_best_hyperparameters(num_trials=1)[0]

    # `best_hps.get` raises if a hyperparameter was never sampled
    # (e.g. the best model has fewer stacked layers).
    try:
        hidden_layer_1 = best_hps.get('units_0')
    except Exception:
        hidden_layer_1 = 0
    try:
        hidden_layer_2 = best_hps.get('units_1')
    except Exception:
        hidden_layer_2 = 0

    model_info = {
        'output': item,
        "time_step": time_step,
        'hidden_layer_1': hidden_layer_1,
        'hidden_layer_2': hidden_layer_2,
        'hidden_layer_3': best_hps.get('units_n'),
    }

    # Release the tuner
    del tuner

    return model_info
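
`ClearTrainingOutput` is another user-defined callback that is not shown on this page. In the official TensorFlow hyperparameter-tuning tutorial, a callback with this name simply clears the notebook output between runs; a sketch along those lines:

import IPython
import tensorflow as tf


class ClearTrainingOutput(tf.keras.callbacks.Callback):
    """Clear the notebook cell output when a training run ends."""

    def on_train_end(self, *args, **kwargs):
        IPython.display.clear_output(wait=True)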

Example #3
# Assumed imports for this snippet: `import time`, `import numpy as np`, and
# RandomSearch as above; `build_model` is user-defined (a sketch follows the
# snippet).
LOG_DIR = f"{int(time.time())}"

tuner = RandomSearch(build_model,
                     objective="val_accuracy",
                     max_trials=1,
                     executions_per_trial=1,
                     directory=LOG_DIR)

tuner.search(x=X_train,
             y=y_train,
             epochs=150,
             batch_size=64,
             validation_split=0.1)
print(tuner.get_best_hyperparameters()[0].values)
tuner.results_summary()  # prints its own report; wrapping it in print() shows None

best_model = tuner.get_best_models(num_models=1)[0]
NN_pred = best_model.predict(X_test)
# Decode the predicted probability vectors to class indices.
pred = list(np.argmax(NN_pred, axis=1))
test = list(np.argmax(y_test, axis=1))
train_pred = best_model.predict(X_train)
train_prediction = list(np.argmax(train_pred, axis=1))
train = list()
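
`build_model` for this example is not included either. Since the tuner targets `val_accuracy` and the predictions are decoded with `np.argmax`, it is presumably a softmax classifier; a hypothetical version, where `n_features`, `n_classes`, and the layer sizes are placeholders:

import tensorflow as tf

n_features = 10  # placeholder: number of input features per sample
n_classes = 3    # placeholder: width of the one-hot labels


def build_model(hp):
    model = tf.keras.Sequential([
        tf.keras.layers.Input(shape=(n_features,)),
        tf.keras.layers.Dense(hp.Int('units', 32, 512, step=32),
                              activation='relu'),
        tf.keras.layers.Dense(n_classes, activation='softmax'),
    ])
    model.compile(optimizer='adam',
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    return model
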
Example #4
    # Excerpt from inside a function; `tuner_rs` and the training arrays are
    # defined elsewhere in the source project.
    callbacks = [
        tf.keras.callbacks.EarlyStopping(monitor='val_loss',
                                         mode='min',
                                         patience=100,
                                         restore_best_weights=True,
                                         min_delta=0.00001,
                                         verbose=1)
    ]
    tuner_rs.search(x_train_simple_net,
                    y_train_simple_net,
                    epochs=1000,
                    validation_split=0.2,
                    verbose=0,
                    callbacks=callbacks)

    # get_best_hyperparameters returns a list; build from the single best entry.
    best_hp = tuner_rs.get_best_hyperparameters(num_trials=10)[0]
    model = tuner_rs.hypermodel.build(best_hp)

    # run1 = {'units': 8, 'activation': 'relu', 'kernel_initializer': 'random_normal', 'bias_initializer': 'lecun_normal', 'regularizers.l2': 0.001, 'learning_rate': 0.001}
    # run2 = {'units': 8, 'activation': 'elu', 'kernel_initializer': 'random_normal', 'bias_initializer': 'lecun_normal', 'regularizers.l2': 0.001, 'learning_rate': 0.001}
    # run3 = {'units': 8, 'activation': 'elu', 'kernel_initializer': 'random_uniform', 'bias_initializer': 'random_normal', 'regularizers.l2': 0.001, 'learning_rate': 0.001}
    # run4 = {'units': 6, 'activation': 'elu', 'kernel_initializer': 'random_normal', 'bias_initializer': 'random_uniform', 'regularizers.l2': 0.001, 'learning_rate': 0.001}
    # run5 = {'units': 8, 'activation': 'elu', 'kernel_initializer': 'random_uniform', 'bias_initializer': 'lecun_normal', 'regularizers.l2': 0.001, 'learning_rate': 0.001}

    best_model = tuner_rs.get_best_models(num_models=1)[0]
    # loss, mse = best_model.evaluate(x_valid_simple_net, y_valid_simple_net)

    # ----------------------------after finding the best hyper-parameters train the model-----------------------------

    # net_model, model_history, early_stoping = obj1.tuned_net(x_train_simple_net, y_train_simple_net)
    return model


tuner_search = RandomSearch(build_model,
                            objective='accuracy',
                            max_trials=5,
                            directory=r"C:\Users\khares\Work\output13")

tuner_search.search(training_set, epochs=5)

model = tuner_search.get_best_models(num_models=1)[0]

model.summary()

best_hyperparameters = tuner_search.get_best_hyperparameters(1)[0].values
best_hyperparameters

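# Hand-written rebuild of the tuned architecture; the filter counts below
# (48, 160, 144) are presumably the values the search above reported.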
model = Sequential()
model.add(
    Conv2D(filters=48,
           kernel_size=5,
           padding="same",
           activation="relu",
           input_shape=(224, 224, 3)))
model.add(MaxPooling2D(pool_size=4))
model.add(Conv2D(filters=160, kernel_size=3, padding="same",
                 activation="relu"))
model.add(MaxPooling2D(pool_size=4))
model.add(Conv2D(filters=144, kernel_size=5, padding="same",
                 activation="relu"))