Example #1
# Imports assumed by this excerpt (standard kerastuner / Keras APIs);
# define_one_block_model is project code, sketched below.
from kerastuner import RandomSearch
from tensorflow.keras.preprocessing.image import ImageDataGenerator
def tune_with_kerastuner2():
    # model = define_one_block_model()
    tuner = RandomSearch(define_one_block_model,
                         objective='val_accuracy',
                         max_trials=5,
                         executions_per_trial=3,
                         directory="tuner_dir",
                         project_name="cats_vs_dogs_tuner")

    train_datagen = ImageDataGenerator(rescale=1.0 / 255.0)
    # width_shift_range=0.1, height_shift_range=0.1, horizontal_flip=True)
    test_datagen = ImageDataGenerator(rescale=1.0 / 255.0)
    # prepare iterators
    train_it = train_datagen.flow_from_directory('dataset_dogs_vs_cats/train/',
                                                 class_mode='binary',
                                                 batch_size=16,
                                                 target_size=(200, 200))
    test_it = test_datagen.flow_from_directory('dataset_dogs_vs_cats/test/',
                                               class_mode='binary',
                                               batch_size=16,
                                               target_size=(200, 200))

    tuner.search(train_it,
                 steps_per_epoch=len(train_it),
                 validation_data=test_it,
                 validation_steps=len(test_it),
                 epochs=5,
                 use_multiprocessing=True)

    models = tuner.get_best_models(num_models=3)
    tuner.results_summary()
    return tuner
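`define_one_block_model` is defined elsewhere in the project. A minimal sketch of what a one-block builder for this 200x200 binary task could look like (the filter range, learning rates, and layer sizes are illustrative assumptions, not the original code):

from tensorflow import keras

def define_one_block_model(hp):
    # Hypothetical one-conv-block CNN; the filter count and learning rate
    # are exposed to the tuner through the `hp` argument.
    model = keras.Sequential([
        keras.layers.Conv2D(hp.Int('filters', 16, 64, step=16), (3, 3),
                            activation='relu', input_shape=(200, 200, 3)),
        keras.layers.MaxPooling2D((2, 2)),
        keras.layers.Flatten(),
        keras.layers.Dense(128, activation='relu'),
        keras.layers.Dense(1, activation='sigmoid'),
    ])
    model.compile(optimizer=keras.optimizers.Adam(
        hp.Choice('learning_rate', values=[1e-3, 1e-4])),
        loss='binary_crossentropy', metrics=['accuracy'])
    return model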
Example #2
    def fit_model(self, split_number):
        self.log_event('Training with HyperParameter Tuning is started..')

        tuner = RandomSearch(self.build_model,
                             objective=self.objective_function,
                             max_trials=self.max_trials,
                             executions_per_trial=self.execution_per_trial,
                             seed=self.random_state,
                             project_name='split_' + str(split_number),
                             directory=os.path.normpath(self.lstm_tuner_save_dir))

        tuner.search(self.train_features, self.train_targets,
                     epochs=self.epochs,
                     batch_size=self.batch_size,
                     verbose=2,
                     validation_data=(self.test_features, self.test_targets))

        model = tuner.get_best_models(num_models=1)[0]

        model.summary()  # summary() prints directly and returns None
        keras.utils.plot_model(model,
                               to_file=self.lstm_model_save_dir + self.lstm_model_description + '.png',
                               show_shapes=True,
                               show_layer_names=True)

        # Fit the best model of the split with the data
        history = model.fit(x=self.train_features,
                            y=self.train_targets,
                            batch_size=self.batch_size,
                            epochs=self.epochs,
                            verbose=2,
                            validation_data=(self.test_features, self.test_targets))

        # Save the model
        current_time = datetime.datetime.now()
        model.save(self.lstm_tuner_save_dir + '/split_' + str(split_number) + '/' +
                   self.lstm_model_description + '_' +
                   current_time.strftime('%Y-%m-%d_%H-%M-%S') + '.h5')

        self.lstm_model = model
        self.model_history = history

        hist_df = pd.DataFrame(history.history)
        hist_df.to_csv(self.lstm_tuner_save_dir + '/split_' + str(split_number) + '/best_model_history.csv', index=False, header=True)
        self.best_possible_models.append(hist_df)
        self.print_summary(split_number)
        self.log_event('Training with HyperParameter Tuning is finished..')
Example #3
def search(dt=600,
           window_size=360,
           future_steps=144,
           epochs=50,
           with_time=True,
           batch_size=128,
           max_trials=200):
    bathroom1 = Dataset.parse('dataset/', 'bathroom1')
    kitchen1 = Dataset.parse('dataset/', 'kitchen1')
    combined1 = bathroom1.combine(kitchen1)

    X, y = prepare_data_future_steps(combined1,
                                     window_size=window_size,
                                     dt=dt,
                                     with_time=with_time,
                                     future_steps=future_steps)
    X_train = X[:-4 * (3600 // dt) * 24, :, :]  # everything except the last 4 days
    X_val = X[-4 * (3600 // dt) * 24:-2 * (3600 // dt) * 24, :, :]  # 3rd and 4th day from the end
    X_test = X[-2 * (3600 // dt) * 24:, :, :]  # last 2 days

    # For now only sensor 24
    y_train = y[:-4 * (3600 // dt) * 24, :, 0]
    y_val = y[-4 * (3600 // dt) * 24:-2 * (3600 // dt) * 24, :, 0]
    y_test = y[-2 * (3600 // dt) * 24:, :, 0]

    hypermodel = FuturePredictionModelHyperparameters(
        window_size=window_size,
        num_features=X.shape[2],
        future_steps=future_steps)
    tuner = RandomSearch(hypermodel,
                         objective='val_loss',
                         max_trials=max_trials,
                         directory='test_dir')

    tuner.search_space_summary()

    tuner.search(x=X_train,
                 y=y_train,
                 epochs=epochs,
                 batch_size=batch_size,
                 validation_data=(X_val, y_val),
                 callbacks=[IsNanEarlyStopper(monitor='loss')])

    tuner.results_summary()
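`IsNanEarlyStopper` is a project-specific callback that is not part of this excerpt. A minimal sketch of what it plausibly does, ending a trial once the monitored value turns NaN (for the loss-only case, TensorFlow's built-in `tf.keras.callbacks.TerminateOnNaN` is equivalent):

import numpy as np
import tensorflow as tf

class IsNanEarlyStopper(tf.keras.callbacks.Callback):
    """Hypothetical reconstruction: stop training when the metric is NaN."""

    def __init__(self, monitor='loss'):
        super().__init__()
        self.monitor = monitor

    def on_batch_end(self, batch, logs=None):
        value = (logs or {}).get(self.monitor)
        if value is not None and np.isnan(value):
            self.model.stop_training = True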
Example #4
def search_model():
    data = get_data()
    data_train, data_val_model, data_val_interpretation, data_test = get_train_val_test_splits(data)

    train_features = np.load("train_features.npy")
    valid_features = np.load("valid_features.npy")
    train_y = data_train["outcome"].values
    valid_y = data_val_model["outcome"].values
    tuner = RandomSearch(
        get_hp_model,
        objective='val_accuracy',
        max_trials=20,
        executions_per_trial=1,
        directory='test',
        project_name='test')

    tuner.search(train_features, y=train_y, batch_size=32, epochs=300,
                 validation_data=(valid_features, valid_y), verbose=2,
                 class_weight=dict(enumerate(utils.get_class_weights(train_y))),
                 # callbacks=[EarlyStopping(patience=30)]
                 )
    tuner.results_summary()
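`utils.get_class_weights` is not included in the excerpt. A sketch of one common implementation using scikit-learn's balanced weights (an assumption about the helper, not its actual code); note that the caller's `dict(enumerate(...))` presumes class labels 0..K-1:

import numpy as np
from sklearn.utils.class_weight import compute_class_weight

def get_class_weights(y):
    # Hypothetical stand-in: 'balanced' weights ordered by class label,
    # so dict(enumerate(...)) maps each label to its weight.
    classes = np.unique(y)
    return compute_class_weight('balanced', classes=classes, y=y)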
Example #5
        activation='relu'
    ),
    # the hidden Dense layer above tunes its node count between 32 and 128
    keras.layers.Dense(10, activation='softmax')
    # the last dense layer with 10 output nodes
  ])
  
  model.compile(optimizer=keras.optimizers.Adam(hp.Choice('learning_rate', values=[1e-2, 1e-3])),
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])
  
  return model

from kerastuner import RandomSearch  # RandomSearch tries to find the best hyperparameters for the problem
from kerastuner.engine.hyperparameters import HyperParameters

# run random search
# which conv layer and how many filters need to use

tuner_search = RandomSearch(build_model,
                            objective='val_accuracy',
                            max_trials=5,
                            directory='CatsVsDogs',
                            project_name='Mnist Fashion')

# search for the best hyperparameters using build_model;
# search() has the same signature as model.fit()
tuner_search.search(train_images, train_labels, epochs=3, validation_split=0.1)

model = tuner_search.get_best_models(num_models=1)[0]

model.summary()

# continue training the best model for more epochs

model.fit(train_images, train_labels, epochs=10, validation_split=0.1, initial_epoch=3)
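The top of `build_model` is cut off in this excerpt. Assuming it follows the same Fashion-MNIST pattern as Examples #13, #19, and #24 in this collection, a complete builder might look like this (the convolution block is a reconstruction, not the original code):

from tensorflow import keras

def build_model(hp):
    model = keras.Sequential([
        # Assumed convolution block, mirroring the sibling examples.
        keras.layers.Conv2D(
            filters=hp.Int('conv_1_filters', min_value=32, max_value=128, step=16),
            kernel_size=hp.Choice('conv_1_kernel', values=[3, 5]),
            activation='relu',
            input_shape=(28, 28, 1)),
        keras.layers.Flatten(),
        keras.layers.Dense(
            units=hp.Int('dense_1_units', min_value=32, max_value=128, step=16),
            activation='relu'),
        keras.layers.Dense(10, activation='softmax')
    ])
    model.compile(optimizer=keras.optimizers.Adam(hp.Choice('learning_rate', values=[1e-2, 1e-3])),
                  loss='sparse_categorical_crossentropy',
                  metrics=['accuracy'])
    return model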
Example #6
def hyper_tuner(month_path, log_path, time_step, train_data, valid_data,
                test_data, item, item_index, item_name):

    train_x, valid_x, test_x, train_y, valid_y, test_y = data_split_norm(
        train_data, valid_data, test_data, item_index, time_step)

    hyper_model = MyHyperModel(input_shape=train_x.shape[-2:])

    project_name = item_name + "_" + str(time_step)

    tuner = RandomSearch(hyper_model,
                         objective='val_loss',
                         max_trials=150,
                         executions_per_trial=5,
                         directory=log_path,
                         project_name=project_name)

    callbacks = [
        tf.keras.callbacks.EarlyStopping(monitor='val_loss',
                                         min_delta=1e-3,
                                         patience=15,
                                         restore_best_weights=True),
        tf.keras.callbacks.ReduceLROnPlateau(monitor='val_loss',
                                             factor=0.1,
                                             patience=5,
                                             verbose=0,
                                             mode='auto',
                                             min_delta=0.0001),
        ClearTrainingOutput()
    ]

    tuner.search_space_summary()

    tuner.search(train_x,
                 train_y,
                 epochs=200,
                 batch_size=256,
                 validation_data=(valid_x, valid_y),
                 callbacks=callbacks,
                 verbose=2)

    # Get the optimal hyperparameters
    best_hps = tuner.get_best_hyperparameters(num_trials=1)[0]

    model = tuner.hypermodel.build(best_hps)
    history = model.fit(train_x,
                        train_y,
                        epochs=200,
                        batch_size=256,
                        validation_data=(valid_x, valid_y),
                        callbacks=callbacks,
                        verbose=2)
    models_path = os.path.join(month_path, "models")
    if not os.path.exists(models_path):
        os.mkdir(models_path)
    model_name = item + "_model.h5"
    model_path = os.path.join(models_path, model_name)
    model.save(model_path)
    # release the tuner
    del tuner

    layers_path = os.path.join(month_path, item)
    if not os.path.exists(layers_path):
        os.mkdir(layers_path)
    plot_path_1 = os.path.join(layers_path, "loss_epoch.png")
    plot_path_2 = os.path.join(layers_path, "measure_predict.png")
    csv_path = os.path.join(layers_path, "measure_predict.csv")

    history = history.history

    plt.plot(history['loss'], linewidth=2, label='Train')
    plt.plot(history['val_loss'], linewidth=2, label='Validation')
    plt.legend(loc='upper right')
    plt.ylabel('Mean Relative Error [$' + item + '$]')
    plt.xlabel('Epoch')
    plt.savefig(plot_path_1, dpi=300, bbox_inches="tight")
    plt.close()

    test_predictions = model.predict(test_x)
    plt.figure(figsize=(20, 6))
    plt.plot(test_y, label='measure')
    plt.plot(test_predictions, label='predict')
    plt.legend(loc='upper right')
    if item == 'PH':
        plt.ylabel(item)
    else:
        plt.ylabel(item + '(mg/L)')
    plt.xlabel('Test set')
    plt.savefig(plot_path_2, dpi=300, bbox_inches="tight")
    plt.close()

    test_predictions = test_predictions.flatten()  # flatten() reduces to one dimension
    valid_predictions = model.predict(valid_x).flatten()

    train_fvu = history['loss'][-1]
    train_rmse = history['root_mean_squared_error'][-1]
    train_mre = history['mean_absolute_percentage_error'][-1]

    valid_fvu, valid_rmse, valid_mre = model.evaluate(valid_x,
                                                      valid_y,
                                                      verbose=0)
    valid_nse, valid_cc = calc_nse_cc(valid_y, valid_predictions)
    test_fvu, test_rmse, test_mre = model.evaluate(test_x, test_y, verbose=0)

    test_nse, test_cc = calc_nse_cc(test_y, test_predictions)

    measure_predict_data = pd.DataFrame()
    measure_predict_data['predict'] = test_predictions
    measure_predict_data['measure'] = test_y
    measure_predict_data.to_csv(csv_path, encoding='utf-8')
    try:
        hidden_layer_1 = best_hps.get('units_0')
    except Exception:  # hyperparameter absent for shallower models
        hidden_layer_1 = 0
    try:
        hidden_layer_2 = best_hps.get('units_1')
    except Exception:
        hidden_layer_2 = 0

    model_info = {
        'output': item,
        "time_step": time_step,
        'hidden_layer_1': hidden_layer_1,
        'hidden_layer_2': hidden_layer_2,
        'hidden_layer_3': best_hps.get('units_n'),
        'train_fvu': train_fvu,
        'train_rmse': train_rmse,
        'train_mre': train_mre,
        "valid_fvu": valid_fvu,
        'valid_nse': valid_nse,
        'valid_rmse': valid_rmse,
        'valid_mre': valid_mre,
        'valid_cc': valid_cc,
        "test_fvu": test_fvu,
        'test_nse': test_nse,
        'test_rmse': test_rmse,
        'test_mre': test_mre,
        'test_cc': test_cc
    }
    return model_info
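`calc_nse_cc` is not shown in the excerpt. A sketch of the standard definitions it presumably implements, the Nash-Sutcliffe efficiency and the Pearson correlation coefficient (a hypothetical reconstruction, not the project's code):

import numpy as np

def calc_nse_cc(measured, predicted):
    # NSE = 1 - SSE / variance of the observations (1.0 is a perfect fit);
    # CC is the Pearson correlation between the two series.
    measured = np.asarray(measured).flatten()
    predicted = np.asarray(predicted).flatten()
    nse = 1 - np.sum((measured - predicted) ** 2) / np.sum(
        (measured - measured.mean()) ** 2)
    cc = np.corrcoef(measured, predicted)[0, 1]
    return nse, cc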
Example #7
    model = get_model(input_size=118, output_size=2, magic='tanh', dropout=0.5)
    tuner = RandomSearch(get_tuned_model,
                         objective='val_binary_accuracy',
                         max_trials=7,
                         executions_per_trial=4,
                         directory='project',
                         project_name='Air Quality Index')

    tuner.search_space_summary()  # prints directly; returns None

    x_train = np.asarray(x_train).astype(np.float32)
    y_train = np.asarray(y_train).astype(np.float32)
    x_val = np.asarray(x_val).astype(np.float32)
    y_val = np.asarray(y_val).astype(np.float32)

    tuner.search(x_train, y_train, epochs=5, validation_data=(x_val, y_val))

    tuner.results_summary(num_trials=3)  # 3 best trials; prints directly

    test_acc, test_loss = fit_and_evaluate(model,
                                           x_train,
                                           y_train,
                                           x_val,
                                           y_val,
                                           batch_size=8192,
                                           epochs=100)

    h1n1_preds, seasonal_preds = make_predictions(model, x_train)
    h1n1_true, seasonal_true = y_train[:, 0].tolist(), y_train[:, 1].tolist()
    train_score = get_scores(h1n1_true, h1n1_preds, seasonal_true,
                             seasonal_preds)
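`make_predictions` and `get_scores` are project helpers that are not part of this excerpt. A plausible sketch of the former, assuming the model emits two sigmoid outputs (H1N1 and seasonal flu probabilities) side by side:

def make_predictions(model, x):
    # Hypothetical helper: split the model's two output columns
    # into separate prediction lists.
    preds = model.predict(x)
    return preds[:, 0].tolist(), preds[:, 1].tolist()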
Example #8
# Imports assumed by this excerpt:
import numpy as np
from kerastuner import RandomSearch
training_data_x, training_data_y = generate_training_data(display,clock)

"""Создание тюнера"""
tuner = RandomSearch(
    build_model,
    objective='val_accuracy',
    max_trials=30,
    directory='C:\\Users\\agolovanov\\PycharmProjects\\Snake_tuner\\model'  # directory where the trained networks are saved
)

tuner.search_space_summary()

tuner.search(np.array(training_data_x).reshape(-1, 7),
             np.array(training_data_y).reshape(-1, 3),
             batch_size=2048,
             epochs=7,
             validation_split=0.2,
             verbose=1)
tuner.results_summary()

# Output the best models
models = tuner.get_best_models(num_models=10)

with open("result.txt",'w') as f:
    f.write(models)


    def start_hyper_parameter_tuning(self, split_number):
        """
            Method is responsible to find best RNN model fit the split data

            1. Use Keras Tuner to find best possible configuration for the RNN Model
            2. Train the candidate models
            3. Find the best performed model and train the model on the split data.
            4. After the training;
                4.1 Plot and save the model architecture to the file system.
                4.2 Save the RNN Model to the file system
                4.3 Save the training data (plots and csv) to the file system
            5. Append the best performed model to best_possible_models list to compare all the models easily after all the split training is over.

        :param split_number: The number of the split data
        """
        self.log_event('Training with HyperParameter Tuning is started..')

        tuner = RandomSearch(
            self.build_model,
            objective=self.hyper_parameters['objective_function'],
            max_trials=self.max_trials,
            executions_per_trial=self.executions_per_trial,
            seed=self.random_state,
            project_name=f'split_{str(split_number)}',
            directory=os.path.normpath(self.path_tuner_directory))

        tuner.search(self.train_features,
                     self.train_targets,
                     epochs=self.epochs,
                     batch_size=self.batch_size,
                     verbose=2,
                     validation_data=(self.test_features, self.test_targets))

        # Get the trials
        trials = tuner.oracle.trials
        best_trial_id = tuner.oracle.get_best_trials()[0].trial_id

        self.model = tuner.get_best_models(num_models=1)[0]

        self.model_history = self.model.fit(
            self.train_features,
            self.train_targets,
            epochs=self.epochs,
            batch_size=self.batch_size,
            verbose=2,
            validation_data=(self.test_features, self.test_targets))

        self.print_hyper_parameter_results(split_number=split_number,
                                           trials=trials,
                                           best_model=best_trial_id)

        keras.utils.plot_model(
            self.model,
            to_file=f'{self.path_model_directory}{self.model_alias}.png',
            show_shapes=True,
            show_layer_names=True)

        current_time = datetime.datetime.now()
        save_path = f'{self.path_tuner_directory}/split_{str(split_number)}/split_{str(split_number)}_{self.model_alias}_{str(current_time.strftime("%Y-%m-%d_%H-%M-%S"))}.h5'
        self.model.save(save_path)

        hist_df = pd.DataFrame(self.model_history.history)
        hist_df.to_csv(
            f'{self.path_tuner_directory}/split_{str(split_number)}/split_{str(split_number)}_{self.model_alias}_best_model_history.csv',
            index=False,
            header=True)
        self.best_possible_models.append(hist_df)

        self.plot_train_summary(plot_title=f'Split {str(split_number)}',
                                split_number=str(split_number))
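`print_hyper_parameter_results` is not shown in this excerpt. A sketch of how the oracle's trials could be summarized (a hypothetical helper built on standard keras-tuner Trial attributes):

def print_trial_results(trials, best_trial_id):
    # List each trial's score and hyperparameters, flagging the best one.
    for trial_id, trial in trials.items():
        marker = ' <-- best' if trial_id == best_trial_id else ''
        print(f'{trial_id}: score={trial.score} '
              f'params={trial.hyperparameters.values}{marker}')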
Example #10
        optimizer=hp.Choice('optimizer', values=['adam', 'rmsprop', 'SGD']),
        loss='categorical_crossentropy', metrics=['accuracy']
    )

    return model


tuner = RandomSearch(
    build_model,
    objective='val_accuracy',
    max_trials=336,
    directory='test_directory'
)

tuner.search_space_summary()
tuner.search(x_train, y_train, batch_size=256, epochs=70, validation_split=0.1)

tuner.results_summary()

models = tuner.get_best_models(num_models=3)

for m in models:
    m.summary()
#    m.evaluate(x_test, y_test)
#    print()

top = tuner.get_best_models(num_models=1)[0]
history = top.fit(
    x_train, y_train, batch_size=256, epochs=70,
    validation_split=0.1, verbose=1
)
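Note that get_best_models returns models already trained during the search, so the top.fit(...) call above continues from those weights. To retrain the winning configuration from scratch, the usual keras-tuner pattern is to rebuild from the best hyperparameters (a sketch, reusing the same x_train/y_train):

best_hp = tuner.get_best_hyperparameters(num_trials=1)[0]
fresh_model = tuner.hypermodel.build(best_hp)  # untrained model with the best config
fresh_model.fit(x_train, y_train, batch_size=256, epochs=70, validation_split=0.1)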
Example #11
    # Build model
    model = keras.Model(inputs, outputs)
    model.compile(optimizer=Adam(lr),
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    return model


# Initialize the tuner by passing the `build_model` function
# and specifying key search constraints: the objective to optimize
# (validation accuracy here) and the number of trials to run. More
# efficient tuners like UltraBand() can also be used.
tuner = RandomSearch(build_model, objective='val_accuracy', max_trials=TRIALS,
                     project_name='hello_world_tutorial_results')

# Display search space overview
tuner.search_space_summary()

# Perform the model search. The search function has the same signature
# as `model.fit()`.
tuner.search(x_train, y_train, batch_size=128, epochs=EPOCHS,
             validation_data=(x_val, y_val))

# Display the best models, their hyperparameters, and the resulting metrics.
tuner.results_summary()

# Retrieve the best model and display its architecture
best_model = tuner.get_best_models(num_models=1)[0]
best_model.summary()
Example #12
  # OUTPUT LAYER
  model.add(Dense(units=1, activation='sigmoid'))

  model.compile(optimizer='Adam', loss='binary_crossentropy', metrics=['accuracy'])

  return model

from kerastuner import RandomSearch
from kerastuner.engine.hyperparameters import HyperParameters

tuner_search = RandomSearch(create_model,
                            objective='val_accuracy',
                            max_trials=5,
                            directory=basepath,
                            project_name='Tuner_Folder')

tuner_search.search(train_X,train_y,epochs=3,validation_split=0.2)

model = tuner_search.get_best_models(num_models=1)[0]

model.summary()

model_history = model.fit(train_X, train_y, epochs=10, validation_split=0.1,
                          initial_epoch=3,
                          callbacks=[
                              tf.keras.callbacks.ModelCheckpoint(
                                  'models/model_{val_accuracy:.3f}.h5',
                                  save_best_only=True,
                                  save_weights_only=False,
                                  monitor='val_accuracy')
                          ])

model.save('model_final.h5')

import matplotlib.pyplot as plt
Example #13
        keras.layers.Flatten(),
        keras.layers.Dense(units=hp.Int('dense_1_units',
                                        min_value=32,
                                        max_value=128,
                                        step=16),
                           activation='relu'),
        keras.layers.Dense(units=10, activation='softmax')
    ])
    model.compile(optimizer=keras.optimizers.Adam(
        hp.Choice('learning_rate', values=[1e-2, 1e-3])),
                  loss='sparse_categorical_crossentropy',
                  metrics=['accuracy'])
    return model


from kerastuner import RandomSearch
from kerastuner.engine.hyperparameters import HyperParameters

model_tuning = RandomSearch(build_model,
                            objective='val_accuracy',
                            max_trials=5)

model_tuning.search(images_train, label_train, epochs=3, validation_split=0.2)

best_model = model_tuning.get_best_models(num_models=1)[0]

best_model.fit(images_train,
               label_train,
               epochs=10,
               validation_split=0.2,
               initial_epoch=3)
Example #14
                                           min_value=32,
                                           max_value=128,
                                           step=16),
                            kernel_size=hp.Int('Conv_2_Kernel', 3, 5),
                            activation='relu'),
        keras.layers.Flatten(),
        keras.layers.Dense(units=hp.Int('dense_units_1',
                                        min_value=30,
                                        max_value=128,
                                        step=16),
                           activation='relu'),
        keras.layers.Dense(10, activation='softmax')
    ])
    model.compile(optimizer=keras.optimizers.Adam(
        hp.Float('learning_rate', 0.1, 0.2)),
                  loss='sparse_categorical_crossentropy',
                  metrics=['accuracy'])
    return model


tuner_search = RandomSearch(build_model,
                            objective='val_accuracy',
                            max_trials=5,
                            executions_per_trial=1,
                            directory='C:/',
                            project_name="mnist_fashion")
tuner_search.search(train_images,
                    data_set_train_label,
                    epochs=3,
                    validation_split=0.1,
                    batch_size=1000)
Example #15
                      optimizer='adam',
                      metrics=['accuracy'])
        return model


LOG_DIR = f"{int(time.time())}"

tuner = RandomSearch(build_model,
                     objective="val_accuracy",
                     max_trials=1,
                     executions_per_trial=1,
                     directory=LOG_DIR)

tuner.search(x=X_train,
             y=y_train,
             epochs=150,
             batch_size=64,
             validation_split=0.1)
print(tuner.get_best_hyperparameters()[0].values)
tuner.results_summary()  # prints directly; returns None

best_model = tuner.get_best_models(num_models=1)[0]
NN_pred = best_model.predict(X_test)
pred = [np.argmax(p) for p in NN_pred]  # predicted class indices
test = [np.argmax(t) for t in y_test]   # true class indices
train_pred = best_model.predict(X_train)
train_prediction = list()
Example #16
        model.add(layers.Dropout(rate=0.5))
        model.add(layers.Dense(6, activation='softmax'))

        model.compile(optimizer=Optimizer.Adam(learning_rate=hp.Choice(
            'learning_rate', values=[1e-2, 1e-3, 1e-4])),
                      loss='sparse_categorical_crossentropy',
                      metrics=['accuracy'])

        return model


# Initialize the model
hypermodel = RegressionHyperModel()

# Run the random search
tuner_rs = RandomSearch(hypermodel,
                        objective='val_accuracy',
                        max_trials=5,
                        executions_per_trial=1)

tuner_rs.search_space_summary()
tuner_rs.search(train_ds, train_classes, epochs=15, validation_split=0.30)

# Get the best model
best_model = tuner_rs.get_best_models(num_models=1)[0]

# Evaluate
val_ds, val_classes = getImages(
    '../input/intel-image-classification/seg_test/seg_test/', 150)
best_model.evaluate(val_ds, val_classes, verbose=1)
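The excerpt shows only the tail of RegressionHyperModel.build. For reference, a minimal self-contained sketch of such a HyperModel subclass (the Flatten input shape and the tuned hidden layer are assumptions; the tail matches the code above):

from kerastuner import HyperModel
from tensorflow import keras
from tensorflow.keras import layers

class RegressionHyperModel(HyperModel):
    # build() receives the `hp` object, exactly like a model-building function.
    def build(self, hp):
        model = keras.Sequential()
        model.add(layers.Flatten(input_shape=(150, 150, 3)))  # assumed input
        model.add(layers.Dense(hp.Int('units', 32, 512, step=32),
                               activation='relu'))  # assumed tuned layer
        model.add(layers.Dropout(rate=0.5))
        model.add(layers.Dense(6, activation='softmax'))
        model.compile(optimizer=keras.optimizers.Adam(hp.Choice(
            'learning_rate', values=[1e-2, 1e-3, 1e-4])),
                      loss='sparse_categorical_crossentropy',
                      metrics=['accuracy'])
        return model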
Example #17
            metrics=['accuracy']
        )

        return model


def print_return(v):
    print(v.shape)
    return v


if __name__ == '__main__':
    # tf.keras.backend.set_floatx('float16')

    dt = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
    hypermodel = HCNN()
    train, test = get_mnist()
    tuner = RandomSearch(
        hypermodel,
        objective='accuracy',
        max_trials=40,
        directory='models',
        project_name='H-MNIST-' + dt
    )

    print('Created tuner')
    log_dir = "logs/fit/" + dt
    tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1)
    tuner.search(*train, epochs=30, validation_data=test, batch_size=32,
                 callbacks=[tensorboard_callback])
Example #18
# Imports assumed by this excerpt:
from kerastuner import RandomSearch
from kerastuner.applications import HyperXception
from tensorflow.keras.utils import to_categorical
y_train = to_categorical(y_train, NUM_CLASSES)
y_test = to_categorical(y_test, NUM_CLASSES)

# Import a hypertunable version of Xception.
hypermodel = HyperXception(input_shape=x_train.shape[1:], classes=NUM_CLASSES)

# Initialize the hypertuner: we want the model that maximizes the
# validation accuracy, using 40 trials in total.
tuner = RandomSearch(hypermodel,
                     objective='val_accuracy',
                     max_trials=40,
                     project_name='cifar10_xception',
                     directory='test_directory')

# Display search overview.
tuner.search_space_summary()

# Performs the hypertuning.
tuner.search(x_train, y_train, epochs=10, validation_split=0.1)

# Show the best models, their hyperparameters, and the resulting metrics.
tuner.results_summary()

# Retrieve the best model.
best_model = tuner.get_best_models(num_models=1)[0]

# Evaluate the best model.
loss, accuracy = best_model.evaluate(x_test, y_test)
print('loss:', loss)
print('accuracy:', accuracy)
Example #19
    ),
    keras.layers.Flatten(),
    keras.layers.Dense(
        units=hp.Int('dense_1_units', min_value=32, max_value=128, step=16),
        activation='relu'
    ),
    keras.layers.Dense(10, activation='softmax')
  ])
  
  model.compile(optimizer=keras.optimizers.Adam(hp.Choice('learning_rate', values=[1e-2, 1e-3])),
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])
  
  return model

from kerastuner import RandomSearch
from kerastuner.engine.hyperparameters import HyperParameters

tuner_search = RandomSearch(build_model,
                            objective='val_accuracy',
                            max_trials=5,
                            directory='output',
                            project_name="Mnist Fashion")

tuner_search.search(train_images, train_labels, epochs=3, validation_split=0.1)

model = tuner_search.get_best_models(num_models=1)[0]

model.summary()

model.fit(train_images,train_labels, epochs=10, validation_split=0.1, initial_epoch=3)

Example #20
                            objective='mse',
                            seed=42,
                            max_trials=30,
                            executions_per_trial=2,
                            directory=log_dir)
    callbacks = [
        tf.keras.callbacks.EarlyStopping(monitor='val_loss',
                                         mode='min',
                                         patience=100,
                                         restore_best_weights=True,
                                         min_delta=0.00001,
                                         verbose=1)
    ]
    tuner_rs.search(x_train_simple_net,
                    y_train_simple_net,
                    epochs=1000,
                    validation_split=0.2,
                    verbose=0,
                    callbacks=callbacks)

    best_hps = tuner_rs.get_best_hyperparameters(num_trials=10)  # list of the 10 best configs
    model = tuner_rs.hypermodel.build(best_hps[0])  # build() takes a single HyperParameters object, not a list

    # run1 = {'units': 8, 'activation': 'relu', 'kernel_initializer': 'random_normal', 'bias_initializer': 'lecun_normal', 'regularizers.l2': 0.001, 'learning_rate': 0.001}
    # run2 = {'units': 8, 'activation': 'elu', 'kernel_initializer': 'random_normal', 'bias_initializer': 'lecun_normal', 'regularizers.l2': 0.001, 'learning_rate': 0.001}
    # run3 = {'units': 8, 'activation': 'elu', 'kernel_initializer': 'random_uniform', 'bias_initializer': 'random_normal', 'regularizers.l2': 0.001, 'learning_rate': 0.001}
    # run4 = {'units': 6, 'activation': 'elu', 'kernel_initializer': 'random_normal', 'bias_initializer': 'random_uniform', 'regularizers.l2': 0.001, 'learning_rate': 0.001}
    # run5 = {'units': 8, 'activation': 'elu', 'kernel_initializer': 'random_uniform', 'bias_initializer': 'lecun_normal', 'regularizers.l2': 0.001, 'learning_rate': 0.001}

    best_model = tuner_rs.get_best_models(num_models=1)[0]
    # loss, mse = best_model.evaluate(x_valid_simple_net, y_valid_simple_net)
Example #21
    model.add(Dense(2, activation="softmax"))
    model.summary()

    model.compile(loss='categorical_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])

    return model


tuner_search = RandomSearch(build_model,
                            objective='accuracy',
                            max_trials=5,
                            directory=r"C:\Users\khares\Work\output13")

tuner_search.search(training_set, epochs=5)

model = tuner_search.get_best_models(num_models=1)[0]

model.summary()

best_hyperparameters = tuner_search.get_best_hyperparameters(1)[0].values
print(best_hyperparameters)

model = Sequential()
model.add(
    Conv2D(filters=48,
           kernel_size=5,
           padding="same",
           activation="relu",
           input_shape=(224, 224, 3)))
Example #22
tuner = RandomSearch(
    hypermodel,
    objective='val_loss',
    max_trials=20,
    # hyperband_iterations=20,
    project_name='UCI_inceptiontime',
    directory='nas_result')

# Display search overview.
tuner.search_space_summary()

# Performs the hypertuning.
tuner.search(x_train,
             y_train,
             epochs=100,
             validation_split=0.1,
             batch_size=128,
             callbacks=[keras.callbacks.EarlyStopping(patience=10)])

# Show the best models, their hyperparameters, and the resulting metrics.
tuner.results_summary()

# Retrieve the 10 best models.
best_models = tuner.get_best_models(num_models=10)

# Evaluate each of the best models.
for i in range(10):
    print('*************************----best_model_' + str(i) +
          '----*************************')
    loss, accuracy, precision, recall, f1 = best_models[i].evaluate(
        x_test, y_test)
Example #23
def hyper_tuner(log_path, time_step, train_data, valid_data, test_data, item,
                item_index, item_name):
    keras.backend.clear_session()

    train_x, valid_x, test_x, train_y, valid_y, test_y = data_split_norm(
        train_data, valid_data, test_data, item_index, time_step)

    hyper_model = MyHyperModel(input_shape=train_x.shape[-2:])

    project_name = item_name + "_" + str(time_step)

    tuner = RandomSearch(hyper_model,
                         objective='val_loss',
                         max_trials=150,
                         executions_per_trial=10,
                         directory=log_path,
                         project_name=project_name)

    callbacks = [
        tf.keras.callbacks.EarlyStopping(monitor='val_loss',
                                         min_delta=1e-3,
                                         patience=15,
                                         restore_best_weights=True),
        tf.keras.callbacks.ReduceLROnPlateau(monitor='val_loss',
                                             factor=0.1,
                                             patience=5,
                                             verbose=0,
                                             mode='auto',
                                             min_delta=0.0001),
        ClearTrainingOutput()
    ]

    tuner.search_space_summary()

    tuner.search(train_x,
                 train_y,
                 epochs=200,
                 batch_size=256,
                 validation_data=(valid_x, valid_y),
                 callbacks=callbacks,
                 verbose=2)

    # Get the optimal hyperparameters
    best_hps = tuner.get_best_hyperparameters(num_trials=1)[0]

    try:
        hidden_layer_1 = best_hps.get('units_0')
    except Exception:  # hyperparameter absent for shallower models
        hidden_layer_1 = 0
    try:
        hidden_layer_2 = best_hps.get('units_1')
    except Exception:
        hidden_layer_2 = 0

    model_info = {
        'output': item,
        "time_step": time_step,
        'hidden_layer_1': hidden_layer_1,
        'hidden_layer_2': hidden_layer_2,
        'hidden_layer_3': best_hps.get('units_n'),
    }

    # release the tuner
    del tuner

    return model_info
Example #24
        units=hp.Int('dense_1_units', min_value=32, max_value=128, step=16),  # Dense layer tuned between 32 and 128 units
        activation='relu'
    ),
    keras.layers.Dense(10, activation='softmax')
  ])
  
  model.compile(optimizer=keras.optimizers.Adam(hp.Choice('learning_rate', values=[1e-2, 1e-3])),
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])
  
  return model

import kerastuner
from kerastuner import RandomSearch
from kerastuner.engine.hyperparameters import HyperParameters

tuner_search = RandomSearch(build_model,
                            objective='val_accuracy',
                            max_trials=5,
                            directory='output',
                            project_name="Mnist Fashion")

# search() looks for the best hyperparameters from build_model
tuner_search.search(train_images, train_labels, epochs=3, validation_split=0.1)

# get the best model by validation accuracy
model = tuner_search.get_best_models(num_models=1)[0]

model.summary()

# retrain the best model for more epochs
model.fit(train_images, train_labels, epochs=10, validation_split=0.1, initial_epoch=3)

Example #25
  model.add(keras.layers.Dense(1, activation='sigmoid'))
  hp_learning_rate = hp.Choice('learning_rate', values=[1e-2, 1e-3, 1e-4])

  model.compile(optimizer=keras.optimizers.Adam(learning_rate=hp_learning_rate),
                loss='binary_crossentropy',
                metrics=['accuracy'])
  
  return model

!pip install keras-tuner  # the PyPI package is keras-tuner, not keras.tuner

from tensorflow import keras
from kerastuner import RandomSearch
tuner_search = RandomSearch(model_builder,
                            objective='val_accuracy',
                            max_trials=5,
                            directory='malaria_output',
                            project_name='Malaria_Detection')

tuner_search.search(x_train, y_train, epochs=3, validation_split=0.1)

model = tuner_search.get_best_models(num_models=1)[0]
model.summary()

from tensorflow.keras.callbacks import EarlyStopping
early_stop = EarlyStopping(monitor='val_loss', mode='min', verbose=1, patience=3)

model.fit(x_train, y_train, validation_data=(x_test, y_test), epochs=50, initial_epoch=3, callbacks=[early_stop])

model.save('malariadisease.h5')
Example #26
    model.compile(loss='categorical_crossentropy',
                  optimizer=Adam(
                      hp.Choice('learning_rate', values=[1e-4, 1e-3])),
                  metrics=['accuracy'])
    return model


from kerastuner import RandomSearch
from kerastuner.engine.hyperparameters import HyperParameters

tuner = RandomSearch(build_model,
                     objective='val_accuracy',
                     max_trials=5,
                     directory='output1',
                     project_name='EMNIST_Balanced_Tuned')
tuner.search(X_train, y_train, epochs=3, validation_data=(X_val, y_val))

model = tuner.get_best_models(num_models=1)[0]
# model.summary()

from tensorflow.keras.callbacks import EarlyStopping, ReduceLROnPlateau, ModelCheckpoint

model = tuner.get_best_models(num_models=1)[0]

# Some callbacks that we're using

MCP = ModelCheckpoint('Best_points.h5',
                      verbose=1,
                      save_best_only=True,
                      monitor='val_accuracy',
                      mode='max')
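The snippet stops after defining the checkpoint callback. A plausible continuation using the callbacks imported above (the patience values and the fit arguments are assumptions, not the original code):

ES = EarlyStopping(monitor='val_accuracy', patience=10, mode='max', verbose=1)
RLP = ReduceLROnPlateau(monitor='val_loss', factor=0.2, patience=3, verbose=1)

history = model.fit(X_train, y_train, epochs=50,
                    validation_data=(X_val, y_val),
                    callbacks=[MCP, ES, RLP])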
Example #27
                            activation='relu'),
        keras.layers.MaxPooling2D((2, 2)),
        keras.layers.Flatten(),
        keras.layers.Dense(units=hp.Int('dense_1_units',
                                        min_value=32,
                                        max_value=128,
                                        step=16),
                           activation='relu'),
        keras.layers.Dense(10, activation='softmax')
    ])

    model.compile(optimizer=keras.optimizers.Adam(
        hp.Choice('learning_rate', values=[1e-2, 1e-3])),
                  loss='sparse_categorical_crossentropy',
                  metrics=['accuracy'])

    return model


from kerastuner import RandomSearch
from kerastuner.engine.hyperparameters import HyperParameters

tuner = RandomSearch(build_model,
                     objective='val_accuracy',
                     max_trials=5,
                     directory='output',
                     project_name="CIFAR10")

# model.fit(X_Train ,y_train)
tuner.search(X_train, y_train, epochs=3, validation_split=0.1)
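The excerpt ends with the search call. The usual follow-up, as in the other examples in this collection (standard keras-tuner calls; variable names taken from the snippet):

model = tuner.get_best_models(num_models=1)[0]
model.summary()
model.fit(X_train, y_train, epochs=10, validation_split=0.1, initial_epoch=3)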