Example #1
def train_resnet50():

    # generating data from existing images
    train_datagenerator, test_datagenerator = data_generator()

    # the hypermodel comes from model.py; the callback below reduces the
    # learning rate when val_loss plateaus
    lr_reduction = ReduceLROnPlateau(monitor='val_loss',
                                     factor=0.1,
                                     patience=3,
                                     min_lr=1e-5)

    tuner = RandomSearch(project_name=os.path.join(LOGS, 'trial_2/resnet_50'),
                         max_trials=3,
                         executions_per_trial=5,
                         hypermodel=vgg_16,
                         objective='val_accuracy')
    tuner.search(train_datagenerator,
                 epochs=10,
                 callbacks=[lr_reduction],
                 validation_data=test_datagenerator)
    best_hps = tuner.get_best_hyperparameters(num_trials=1)[0]

    model = tuner.hypermodel.build(best_hps)
    model.fit(train_datagenerator,
              epochs=EPOCHS,
              validation_data=test_datagenerator)

    return model
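The data_generator() helper above is defined elsewhere in the project; a
minimal sketch of what it might look like using Keras' ImageDataGenerator
(the directory layout, image size, and batch size are assumptions):

# Hypothetical sketch of the data_generator() helper used in Example #1.
from tensorflow.keras.preprocessing.image import ImageDataGenerator

def data_generator():
    # augment the training images; only rescale the test images
    train_gen = ImageDataGenerator(rescale=1. / 255,
                                   rotation_range=20,
                                   horizontal_flip=True)
    test_gen = ImageDataGenerator(rescale=1. / 255)
    train_datagenerator = train_gen.flow_from_directory('data/train',
                                                        target_size=(224, 224),
                                                        batch_size=32,
                                                        class_mode='categorical')
    test_datagenerator = test_gen.flow_from_directory('data/test',
                                                      target_size=(224, 224),
                                                      batch_size=32,
                                                      class_mode='categorical')
    return train_datagenerator, test_datagenerator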
Example #2
def tuneCNN(X, y, num_classes):

    # creates hypermodel object based on the num_classes and the input shape
    hypermodel = CNNHyperModel(input_shape=(224, 224, 3),
                               num_classes=num_classes)

    # tuner: the object that explores the hyperparameter search space
    tuner = RandomSearch(
        hypermodel,
        objective='val_accuracy',
        seed=42,
        max_trials=3,
        executions_per_trial=3,
        directory='random_search',
    )

    X_train, X_test, y_train, y_test = train_test_split(np.asarray(X),
                                                        np.asarray(y),
                                                        test_size=0.33,
                                                        random_state=42)

    # search the space defined by the hyperparameters (hp); the best
    # model is retrieved below
    tuner.search(X_train,
                 y_train,
                 validation_data=(X_test, y_test),
                 callbacks=[tf.keras.callbacks.EarlyStopping(patience=1)])

    # returns the best model
    return tuner.get_best_models(1)[0]
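CNNHyperModel is defined elsewhere in these projects; a minimal sketch of
such a kerastuner HyperModel subclass (the layer choices and hyperparameter
ranges are assumptions):

# Hypothetical sketch of a CNNHyperModel class like the one used above.
from tensorflow import keras
from kerastuner import HyperModel

class CNNHyperModel(HyperModel):
    def __init__(self, input_shape, num_classes):
        self.input_shape = input_shape
        self.num_classes = num_classes

    def build(self, hp):
        # the tuner calls build(hp) once per trial with fresh hyperparameters
        model = keras.Sequential([
            keras.layers.Conv2D(hp.Choice('filters', [32, 64]),
                                kernel_size=3,
                                activation='relu',
                                input_shape=self.input_shape),
            keras.layers.MaxPooling2D(),
            keras.layers.Flatten(),
            keras.layers.Dense(hp.Int('units', 64, 256, step=64),
                               activation='relu'),
            keras.layers.Dense(self.num_classes, activation='softmax'),
        ])
        model.compile(optimizer='adam',
                      loss='sparse_categorical_crossentropy',
                      metrics=['accuracy'])
        return model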
Example #3
        def build_model(hp):
            model = tf.keras.models.Sequential([
                tf.keras.layers.Flatten(input_shape=(28, 28)),
                tf.keras.layers.Dense(128, activation='relu'),
                tf.keras.layers.Dropout(hp.Choice('dropout_rate', values=[0.2, 0.4])),
                tf.keras.layers.Dense(10, activation='softmax')
            ])

            model.compile(
                optimizer='adam',
                loss='sparse_categorical_crossentropy',
                metrics=['accuracy'])

            return model

        x_train = np.random.random((100, 28, 28))
        y_train = np.random.randint(10, size=(100, 1))
        x_test = np.random.random((20, 28, 28))
        y_test = np.random.randint(10, size=(20, 1))

        tuner = RandomSearch(build_model, objective='accuracy', max_trials=1, executions_per_trial=1, seed=1)

        tuner.search(x_train, y_train, epochs=1)

        self.assertEqual(0.4, tuner.get_best_hyperparameters(1)[0].get('dropout_rate'))
Example #4
    def tune(self):
        """ TODO: actually this should be def tune(..) - will have to reoncile/fix the nomentalures at some point """
        tuner = RandomSearch(self.model,
                             objective='val_accuracy',
                             max_trials=50,
                             executions_per_trial=2,
                             directory='my_dir',
                             project_name='helloworld')

        # preprocess the data
        data = self.data_reduced
        LOG.info(f"BUILD: data from keras: {data.keys()}")

        x = data["x_train"]
        y = data["y_train"]
        x_val = data["x_test"]
        y_val = data["y_test"]

        print(f"label shapes: {y.shape} {y_val.shape}")
        # call the tuner on that

        try:
            tuner.search(x, y, epochs=3, validation_data=(x_val, y_val))
        except Exception as ex:
            LOG.error(f"Failed to tuner.search: {ex}")
        models = tuner.get_best_models(num_models=2)

        # self.tuner_model.fit(data["x_train"],data["y_train"])
        LOG.info("Finished tuning model. Summary:")
Example #5
def CNN_Hyper():
    training_set = tf.keras.preprocessing.image_dataset_from_directory(
        DATA_PATH + "processed/training",
        seed=957,
        image_size=IMAGE_SIZE,
        batch_size=BATCH_SIZE,
    )
    validation_set = tf.keras.preprocessing.image_dataset_from_directory(
        DATA_PATH + "processed/validation",
        seed=957,
        image_size=IMAGE_SIZE,
        batch_size=BATCH_SIZE,
    )

    test_set = tf.keras.preprocessing.image_dataset_from_directory(
        DATA_PATH + "processed/testing",
        seed=957,
        image_size=IMAGE_SIZE,
        batch_size=BATCH_SIZE,
    )

    training_set = training_set.prefetch(buffer_size=32)
    validation_set = validation_set.prefetch(buffer_size=32)

    hyperModel = CNNHyperModel(IMAGE_SIZE + (3, ), CLASS_COUNT, "softmax")

    MAX_TRIALS = 20
    EXECUTION_PER_TRIAL = 1
    N_EPOCH_SEARCH = 25

    tuner = RandomSearch(hyperModel,
                         objective='val_accuracy',
                         seed=957,
                         max_trials=MAX_TRIALS,
                         executions_per_trial=EXECUTION_PER_TRIAL,
                         directory='random_search',
                         project_name='Stanford-Dogs-40_1')

    tuner.search_space_summary()

    tuner.search(training_set,
                 epochs=N_EPOCH_SEARCH,
                 validation_data=validation_set)

    # Show a summary of the search
    tuner.results_summary()

    # Retrieve the best model.
    best_model = tuner.get_best_models(num_models=1)[0]

    # Evaluate the best model.
    loss, accuracy = best_model.evaluate(test_set)
    print("Loss: ", loss)
    print("Accuracy: ", accuracy)
    best_model.summary()
    # Save model
    best_model.save('CNN_Tuned_Best_Model')
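The SavedModel written by best_model.save() above can be reloaded later for
inference; a short usage sketch:

# Reload the tuned model saved above.
import tensorflow as tf

best_model = tf.keras.models.load_model('CNN_Tuned_Best_Model')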


Example #6
# https://www.sicara.ai/blog/hyperparameter-tuning-keras-tuner
    def search_bestCNN(self,
                       X,
                       Y,
                       testX,
                       testY,
                       epochs=50,
                       max_trials=20,
                       batch_size=64,
                       project_name='A1'):
        tuner = RandomSearch(self._build_CNN,
                             objective='val_accuracy',
                             max_trials=max_trials,
                             executions_per_trial=1,
                             directory='tunerlog',
                             project_name=project_name)
        tuner.search(x=X,
                     y=Y,
                     epochs=epochs,
                     batch_size=batch_size,
                     validation_data=(testX, testY),
                     callbacks=[
                         tf.keras.callbacks.EarlyStopping(monitor='val_loss',
                                                          patience=5)
                     ],
                     verbose=2)
        tuner.search_space_summary()
        tuner.results_summary()
        print('best_hyperparameters')
        print(tuner.get_best_hyperparameters()[0].values)
        return tuner.get_best_models()
Example #7
def tuneClass(X,
              y,
              num_classes,
              max_layers=10,
              min_layers=2,
              min_dense=32,
              max_dense=512,
              executions_per_trial=3,
              max_trials=1,
              activation='relu',
              loss='categorical_crossentropy',
              metrics='accuracy'):
    # encode the labels as one-hot vectors
    le = preprocessing.LabelEncoder()
    y = tf.keras.utils.to_categorical(le.fit_transform(y),
                                      num_classes=num_classes)

    def build_model(hp):
        model = keras.Sequential()
        for i in range(hp.Int('num_layers', min_layers, max_layers)):
            model.add(
                layers.Dense(units=hp.Int('units_' + str(i),
                                          min_value=min_dense,
                                          max_value=max_dense,
                                          step=32),
                             activation=activation))
        model.add(layers.Dense(num_classes, activation='softmax'))
        model.compile(optimizer=keras.optimizers.Adam(
            hp.Choice('learning_rate', [1e-2, 1e-3, 1e-4])),
                      loss=loss,
                      metrics=[metrics])
        return model

    # tuner: the object that explores the hyperparameter search space
    tuner = RandomSearch(build_model,
                         objective='loss',
                         max_trials=max_trials,
                         executions_per_trial=executions_per_trial,
                         directory='models',
                         project_name='class_tuned')

    # tuner.search_space_summary()

    X_train, X_test, y_train, y_test = train_test_split(X,
                                                        y,
                                                        test_size=0.2,
                                                        random_state=49)

    # search the space defined by the hyperparameters (hp); the best
    # model is retrieved below
    tuner.search(X_train, y_train, epochs=5, validation_data=(X_test, y_test))
    models = tuner.get_best_models(num_models=1)
    return models[0]
Example #8
def main():

    dataset = makeHistoricalData(fixed_data, temporal_data, h, r, 'death',
                                 'mrmr', 'country', 'regular')

    numberOfSelectedCounties = len(dataset['county_fips'].unique())
    new_dataset = clean_data(dataset, numberOfSelectedCounties)
    X_train, y_train, X_val, y_val, X_test, y_test, y_train_date, y_test_date, y_val_date, val_naive_pred, test_naive_pred = preprocess(
        new_dataset)
    X_train, y_train, X_val, y_val, X_test, y_test, scalar = data_normalize(
        X_train, y_train, X_val, y_val, X_test, y_test)

    hypermodel = LSTMHyperModel(n=X_train.shape[2])

    tuner = RandomSearch(hypermodel,
                         objective='mse',
                         seed=1,
                         max_trials=60,
                         executions_per_trial=4,
                         directory='parameter_tuning',
                         project_name='lstm_model_tuning')

    tuner.search_space_summary()

    print()
    input("Press Enter to continue...")
    print()

    N_EPOCH_SEARCH = 50
    tuner.search(X_train, y_train, epochs=N_EPOCH_SEARCH, validation_split=0.2)

    print()
    input("Press Enter to show the summary of search...")
    print()

    # Show a summary of the search
    tuner.results_summary()

    print()
    input("Press Enter to retrive the best model...")
    print()

    # Retrieve the best model.
    best_model = tuner.get_best_models(num_models=1)[0]

    print()
    input("Press Enter to show best model summary...")
    print()

    best_model.summary()

    print()
    input("Press Enter to run the best model on test dataset...")
    print()

    # Evaluate the best model.
    loss, accuracy = best_model.evaluate(X_test, y_test)
    print("loss = " + str(loss) + ", acc = " + str(accuracy))
Example #9
def random_keras_tuner(compiled_model, objective='val_accuracy', max_trials=5,
                       executions_per_trial=3):
    tuner = RandomSearch(
        compiled_model,
        objective=objective,
        max_trials=max_trials,
        executions_per_trial=executions_per_trial,
        directory='cryptolytic-ds',
        project_name='cryptolytic'
    )
    tuner.results_summary()
    return tuner
Example #10
def get_best_nn(data, num_output=1, **tuner_kw):
    """
    Find the "best" model based on `MyHyperModel` class.

    Parameters
    ----------
    data: numpy.array or similar
        The train and validation data to be used by the hyper parameter tuner.
    num_output: int, optional
        The number of outputs for our NN. 1 default for regression.
    tuner_kw: dictionary
        A dictionary of parameters to be passed to the `RandomSearch` tuner.

    Returns
    -------
    The model built with the "optimised" hyperparameters, plus those
    hyperparameters.
    """
    # Load encoded data
    enc_data = data_encode(data, encoder='CatBoostEncoder')
    x_train_enc, y_train = enc_data.get('train_data')
    x_val_enc, y_val = enc_data.get('test_data')

    # Create an instance of the `MyHyperModel` class
    hyper_model = MyHyperModel(num_output=num_output,
                               num_features=int(x_train_enc.shape[1]))

    # Default tuner params; the default output directory is utils/keras_tuner_output
    default_tuner_params = {
        'objective': 'val_loss',
        'max_trials': 10,
        'directory': 'keras_tuner_output',  # directory for logs, checkpoints, etc.
        'project_name': 'sgsc'
    }

    # Update tuner params
    tuner_params = {**default_tuner_params, **tuner_kw}

    # Initialise tuner and run it
    tuner = RandomSearch(hyper_model, **tuner_params)
    # TODO: do we need to define a seed here, or does the tuner pick one by default?
    tuner.search(
        x_train_enc,
        y_train,
        epochs=5,  # Default number of epochs
        validation_data=(x_val_enc, y_val),
        verbose=0)

    # Get best model
    best_hp = tuner.get_best_hyperparameters()[0]
    best_model = tuner.hypermodel.build(best_hp)

    return best_model, best_hp
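Note that tuner.hypermodel.build(best_hp) returns a freshly built, untrained
model, so the caller still has to fit it; a short usage sketch (train_df is a
hypothetical dataset in whatever format data_encode() expects):

# Fit the model returned by get_best_nn before using it.
best_model, best_hp = get_best_nn(train_df)
x_train_enc, y_train = data_encode(train_df, encoder='CatBoostEncoder').get('train_data')
best_model.fit(x_train_enc, y_train, epochs=5)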
Example #11
def tuneReg(data,
            target,
            max_layers=10,
            min_layers=2,
            min_dense=32,
            max_dense=512,
            executions_per_trial=3,
            max_trials=1):
    print("entered1")

    # build the model using the hyperparameters (hp)

    def build_model(hp):
        model = keras.Sequential()
        for i in range(hp.Int('num_layers', min_layers, max_layers)):
            model.add(
                layers.Dense(units=hp.Int('units_' + str(i),
                                          min_value=min_dense,
                                          max_value=max_dense,
                                          step=32),
                             activation='relu'))
        # linear output for regression (softmax on a single unit is constant)
        model.add(layers.Dense(1, activation='linear'))
        model.compile(optimizer=keras.optimizers.Adam(
            hp.Choice('learning_rate', [1e-2, 1e-3, 1e-4])),
                      loss='mean_squared_error')
        return model

    # random search for the model
    tuner = RandomSearch(build_model,
                         objective='loss',
                         max_trials=max_trials,
                         executions_per_trial=executions_per_trial)

    # tuner.search_space_summary()
    # del data[target]

    X_train, X_test, y_train, y_test = train_test_split(data,
                                                        target,
                                                        test_size=0.2,
                                                        random_state=49)

    # search the space defined by the hyperparameters (hp); the best
    # model is retrieved below
    tuner.search(X_train,
                 y_train,
                 epochs=5,
                 validation_data=(X_test, y_test),
                 callbacks=[tf.keras.callbacks.TensorBoard('my_dir')])

    models = tuner.get_best_models(num_models=1)
    return models[0]
Example #12
    def _fit(self, X_train, y_train, X_test, y_test, X_val, y_val):
        tuner = RandomSearch(self._build_model,
                             objective='val_accuracy',
                             max_trials=self.max_trials,
                             executions_per_trial=1,
                             directory='logs/keras-tuner/',
                             project_name='cnn')

        tuner.search_space_summary()

        tuner.search(x=X_train,
                     y=y_train,
                     epochs=self.epochs,
                     batch_size=self.batch_size,
                     verbose=0,
                     validation_data=(X_val, y_val),
                     callbacks=[EarlyStopping('val_accuracy', patience=4)])
        tuner.results_summary()
        model = tuner.get_best_models(num_models=1)[0]
        model.summary()

        # Evaluate Best Model #
        _, train_acc = model.evaluate(X_train, y_train, verbose=0)
        _, test_acc = model.evaluate(X_test, y_test, verbose=0)
        print('Train: %.3f, Test: %.3f' % (train_acc, test_acc))
Example #13
def define_tuners(hypermodel, directory, project_name):
    random_tuner = RandomSearch(
        hypermodel,
        objective="val_loss",
        seed=SEED,
        max_trials=MAX_TRIALS,
        executions_per_trial=EXECUTION_PER_TRIAL,
        directory=f"{directory}_random_search",
        project_name=project_name,
    )
    hyperband_tuner = Hyperband(
        hypermodel,
        max_epochs=HYPERBAND_MAX_EPOCHS,
        objective="val_loss",
        seed=SEED,
        executions_per_trial=EXECUTION_PER_TRIAL,
        directory=f"{directory}_hyperband",
        project_name=project_name,
    )
    bayesian_tuner = BayesianOptimization(
        hypermodel,
        objective='val_loss',
        seed=SEED,
        num_initial_points=BAYESIAN_NUM_INITIAL_POINTS,
        max_trials=MAX_TRIALS,
        directory=f"{directory}_bayesian",
        project_name=project_name
    )
    return [random_tuner, hyperband_tuner, bayesian_tuner]
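A short usage sketch for the three tuners returned above (the data names and
epoch count are assumptions):

# Run the same search with each tuner and compare the results.
for tuner in define_tuners(hypermodel, 'experiments', 'mnist_tuning'):
    tuner.search(x_train, y_train, epochs=5, validation_data=(x_val, y_val))
    tuner.results_summary()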
Example #14
def search_hp(neumf, dataset):

    trainset = tf.data.TFRecordDataset(
        join('datasets', dataset) + '.trainset.tfrecord').repeat(-1).map(
            parse_function).shuffle(batch_size).batch(batch_size).prefetch(
                tf.data.experimental.AUTOTUNE)
    testset = tf.data.TFRecordDataset(
        join('datasets', dataset) + '.testset.tfrecord').repeat(-1).map(
            parse_function).shuffle(batch_size).batch(batch_size).prefetch(
                tf.data.experimental.AUTOTUNE)
    tuner = RandomSearch(neumf,
                         objective='val_accuracy',
                         max_trials=100,
                         directory='my_dir',
                         project_name='neumf')
    tuner.search(trainset, epochs=5, validation_data=testset)
    # save the best model found by the search
    best_model = tuner.get_best_models(1)[0]
    best_model.save('neumf.h5')
Example #15
def run_fn(fn_args):

  tf_transform_output = tft.TFTransformOutput(fn_args.transform_output)

  train_dataset = input_fn(fn_args.train_files, tf_transform_output, batch_size=100)
  eval_dataset = input_fn(fn_args.eval_files, tf_transform_output, batch_size=100)

  log_dir = os.path.join(os.path.dirname(fn_args.serving_model_dir), 'logs')
  tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir, update_freq='batch')

  if True:
    print("Use normal Keras model")
    mirrored_strategy = tf.distribute.MirroredStrategy()
    with mirrored_strategy.scope():
      model = build_keras_model(None)
    model.fit(
        train_dataset,
        epochs=1,
        steps_per_epoch=fn_args.train_steps,
        validation_data=eval_dataset,
        validation_steps=fn_args.eval_steps,
        callbacks=[tensorboard_callback])
  else:
    print("Use normal Keras Tuner")
    tuner = RandomSearch(
        build_keras_model,
        objective='val_binary_accuracy',
        max_trials=5,
        executions_per_trial=3,
        directory=fn_args.serving_model_dir,
        project_name='tuner')
    tuner.search(
        train_dataset,
        epochs=1,
        steps_per_epoch=fn_args.train_steps,  # or fewer steps to find good HPs, then fit fully
        validation_steps=fn_args.eval_steps,
        validation_data=eval_dataset,
        callbacks=[tensorboard_callback, tf.keras.callbacks.EarlyStopping()])
    tuner.search_space_summary()
    tuner.results_summary()
    best_hparams = tuner.oracle.get_best_trials(1)[0].hyperparameters.get_config()
    model = tuner.get_best_models(1)[0]

  signatures = {
      'serving_default': get_serve_tf_examples_fn(model, tf_transform_output).get_concrete_function(
          tf.TensorSpec(shape=[None],
                        dtype=tf.string,
                        name='input_example_tensor')),
  }

  model.save(fn_args.serving_model_dir, save_format='tf', signatures=signatures)
Example #16
def build_model(X_train, Y_train, X_test, Y_test):
    hyperModel = RegressionHyperModel((X_train.shape[1], ))

    tuner_rs = RandomSearch(hyperModel,
                            objective='mse',
                            max_trials=135,
                            executions_per_trial=1,
                            directory='param_opt_checkouts',
                            project_name='GDW')
    tuner_rs.search(X_train,
                    Y_train,
                    validation_data=(X_test, Y_test),
                    epochs=160)
    best_model = tuner_rs.get_best_models(num_models=1)[0]

    #metrics = ['loss', 'mse', 'mae', 'mape', 'cosine_proximity']
    #_eval = best_model.evaluate(X_test, Y_test)
    #print(_eval)
    #for i in range(len(metrics)):
    #    print(f'{metrics[i]} : {_eval[i]}')

    # history = best_model.fit(X_train, Y_train, validation_data = (X_test, Y_test), epochs=50)

    # best_model.save('./models_ANN/best_model')

    # save_model(best_model)
    tuner_rs.results_summary()
    load_model().summary()
    predict(best_model)
Example #17
    def train_model(self, epochs, batch_size=32):
        self.tuner = RandomSearch(self.build_model,
                                  objective='val_loss',
                                  max_trials=50,
                                  executions_per_trial=1,
                                  directory='logs/keras-tuner/',
                                  project_name='autoencoder')

        self.tuner.search_space_summary()

        self.tuner.search(x=self.X_train,
                          y=self.X_train,
                          epochs=epochs,
                          batch_size=batch_size,
                          validation_data=(self.X_test, self.X_test),
                          callbacks=[EarlyStopping('val_loss', patience=3)])

        self.tuner.results_summary()
Example #18
    def random_search(self):
        tuner = RandomSearch(
            self.build_model,
            objective='mean_squared_error',
            max_trials=self.max_trials,  # more than 2 and it crashes
            overwrite=True,
            directory=self.kt_dir
            # executions_per_trial=self.max_executions_per,
            # project_name=name
        )
        tuner.search(x=self.data,
                     y=self.train_labels,
                     epochs=self.epochs,
                     batch_size=self.batch_size,
                     validation_data=(self.test_data, self.test_labels))

        return tuner
Example #19
def tuneCNN(
        X_train,
        X_test,
        height,
        width,
        num_classes,
        patience=1,
        executions_per_trial=1,
        seed=42,
        max_trials=3,
        objective='val_accuracy',
        directory='my_dir',
        epochs=10,
        verbose=0,
        test_size=0.2):
    # creates hypermodel object based on the num_classes and the input shape
    hypermodel = CNNHyperModel(input_shape=(
        height, width, 3), num_classes=num_classes)

    # tuner: the object that explores the hyperparameter search space
    tuner = RandomSearch(
        hypermodel,
        objective=objective,
        seed=seed,
        max_trials=max_trials,
        executions_per_trial=executions_per_trial,
        directory=directory,
    )


    # search the space defined by the hyperparameters (hp); the best
    # model is retrieved below
    tuner.search(X_train,
                 validation_data=X_test,
                 callbacks=[tf.keras.callbacks.EarlyStopping(patience=patience)],
                 epochs=epochs,
                 verbose=verbose)

    # best hyperparameters
    hyp = tuner.get_best_hyperparameters(num_trials=1)[0]
    #hyp = tuner.oracle.get_best_trials(num_trials=1)[0].hyperparameters.values
    #best_hps = np.stack(hyp).astype(None)
    history = tuner_hist(
        X_train,
        X_test,
        tuner,
        hyp,
        img=1,
        epochs=epochs,
        verbose=verbose,
        test_size=test_size)

    """
    Return:
        models[0] : best model obtained after tuning
        best_hps : best Hyperprameters obtained after tuning, stored as array
        history : history of the data executed from the given model
    """
    return tuner.get_best_models(1)[0], hyp, history
Example #20
def fit_hier_embedding(X, y, result_dir, project):
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.20)
    y_train = to_categorical(y_train, output_dim)
    y_test = to_categorical(y_test, output_dim)

    X_train1 = X_train[['Rating', 'CocoaPercent']].values
    X_train2 = X_train.drop(['Rating', 'CocoaPercent'], axis=1).values
    X_test1 = X_test[['Rating', 'CocoaPercent']].values
    X_test2 = X_test.drop(['Rating', 'CocoaPercent'], axis=1).values

    dim1 = X_train1.shape[1]
    dim2 = X_train2.shape[1]

    # pass the tuner-supplied HyperParameters through to the model builder
    bm = lambda hp: tune_optimizer_model(hp, dim1, dim2)

    print(dim1, dim2)
    tuner = RandomSearch(bm,
                         objective='val_accuracy',
                         max_trials=MAX_TRIALS,
                         executions_per_trial=EXECUTIONS_PER_TRIAL,
                         directory=result_dir,
                         project_name=project,
                         seed=32)

    TRAIN_EPOCHS = 1000

    tuner.search(x=[X_train1, X_train2],
                 y=y_train,
                 epochs=TRAIN_EPOCHS,
                 validation_data=([X_test1, X_test2], y_test))
    tuner.results_summary()
Example #21
def main(args):
    tv = FLAGS.tv
    vv = FLAGS.vv
    bs = FLAGS.bs

    project_name = f'tv{tv}-vv{vv}-bs{bs}'
    print(f'Project Name: {project_name}')
    print()
    tuner = RandomSearch(
        build_hyper_conv_estimator,
        objective='val_loss',
        max_trials=20,
        executions_per_trial=3,
        directory='hyper_search',
        project_name=project_name,
    )

    batch_size = 64
    batches = 4000
    workers = 2
    verbose = 2

    tuner.search_space_summary()
    dataset = TFSeqRandomDataGenerator(batch_size, batches)
    valid_dataset = TFSeqRandomDataGenerator(batch_size, 4000, version=1)
    tuner.search(dataset,
                 validation_data=valid_dataset,
                 epochs=10,
                 workers=workers,
                 use_multiprocessing=True,
                 verbose=verbose)
Example #22
def define_random_tuner(num_classes,
                        directory=Path("./"),
                        project_name="vanilla_cnn_tuning"):
    random_tuner = RandomSearch(
        HyperModels.CNNHyperModel(input_shape=(28, 28, 1),
                                  num_classes=num_classes),
        objective="val_loss",
        max_trials=40,
        executions_per_trial=2,
        directory=f"{directory}_random_search",
        project_name=project_name,
    )

    return random_tuner
Example #23
def KerasTuner(XTrain, YTrain, XValidation, YValidation):
    tuner = RandomSearch(buildModel,
                         objective='mse',
                         max_trials=30,
                         executions_per_trial=10,
                         directory='KerasTuner',
                         project_name=f'KerasTuner-{constants.NAME}')

    tuner.search_space_summary()

    tuner.search(XTrain,
                 YTrain,
                 epochs=5,
                 validation_data=(XValidation, YValidation))

    models = tuner.get_best_models(num_models=1)

    tuner.results_summary()

    return models
Example #24
def find_best_NN(x_train, y_train):
  tuner = RandomSearch(build_model, objective="loss", max_trials=10, executions_per_trial=1)
  print("\n\n\n")
  print('[INFO] start searching')
  tuner.search(x_train, y_train, batch_size=100, epochs=10, validation_split=0.2)
  print("\n\n\nRESULTS SUMMARY")
  tuner.results_summary()
  print("\n\n\n")
  print("\n\n\nHERE IS THE BEST MODEL\n\n\n")
  best_params = tuner.get_best_hyperparameters()[0]
  best_model = tuner.hypermodel.build(best_params)
  best_model.summary()
  return best_model
Example #25
def run_tuner(hypermodel, hp):
    # load dataset
    train_dataset, test_dataset = load_data()

    # init tensorboard here so each run will have folder,
    # which we can rename based on trial_id
    tb_callback = get_tensorboard(TUNER_SETTINGS['log_dir'])

    tuner = RandomSearch(
        hypermodel,
        objective=TUNER_SETTINGS['objective'],
        max_trials=TUNER_SETTINGS['max_trials'],
        metrics=['accuracy'],
        loss='sparse_categorical_crossentropy',
        hyperparameters=hp,
        executions_per_trial=TUNER_SETTINGS['executions_per_trial'],
        directory=TUNER_SETTINGS['log_dir'],
        project_name=project_name)

    tuner.search(train_dataset,
                 validation_data=test_dataset,
                 batch_size=TUNER_SETTINGS['batch_size'],
                 callbacks=TUNER_SETTINGS['callbacks'] + [tb_callback],
                 epochs=TUNER_SETTINGS['epochs'])
Example #26
def define_tuners(hypermodel, directory, project_name):
    random_tuner = RandomSearch(hypermodel,
                                objective='val_accuracy',
                                seed=SEED,
                                max_trials=MAX_TRIALS,
                                executions_per_trial=EXECUTION_PER_TRIAL,
                                directory=f'{directory}_random_search',
                                project_name=project_name)
    hyperband_tuner = Hyperband(hypermodel,
                                max_epochs=HYPERBAND_MAX_EPOCHS,
                                objective='val_accuracy',
                                seed=SEED,
                                executions_per_trial=EXECUTION_PER_TRIAL,
                                directory=f'{directory}_hyperband',
                                project_name=project_name)
    return [random_tuner, hyperband_tuner]
Example #27
def keras_tuner(x_train, y_train, x_test, y_test):
    from kerastuner.tuners import RandomSearch
    tuner = RandomSearch(build_model,
                         objective='val_accuracy',
                         max_trials=5,
                         executions_per_trial=3,
                         directory='./test',
                         project_name='helloworld')

    tuner.search_space_summary()

    tuner.search(x_train, y_train, epochs=5, validation_data=(x_test, y_test))

    tuner.results_summary()
Example #28
def fixed_result_tuner(fixed_model_tmp_path):
    tmp_dir = str(fixed_model_tmp_path / "tmp")
    results_dir = str(fixed_model_tmp_path / "results")
    export_dir = str(fixed_model_tmp_path / "export")

    # Random data to feed the model.

    x_train = []
    y_train = []

    for idx in range(100):
        if idx % 2 == 0:
            x_train.append([0, 1])
            y_train.append([0, 1])
        else:
            x_train.append([1, 0])
            y_train.append([1, 0])

    for idx in range(10):
        if idx % 2 == 0:
            x_train.append([0, 1])
            y_train.append([1, 0])
        else:
            x_train.append([1, 0])
            y_train.append([0, 1])

    x_train = np.array(x_train, dtype=np.float32)
    y_train = np.array(y_train, dtype=np.float32)

    # Initialize the hypertuner by passing the model function (model_fn)
    # and specifying key search constraints: maximize val_acc (objective),
    # spend a budget of 100 epochs on the search, at most 10 epochs per model.
    tuner = RandomSearch(fixed_model_fn,
                         objective='val_acc',
                         epoch_budget=100,
                         max_epochs=10,
                         results_dir=results_dir,
                         tmp_dir=tmp_dir,
                         export_dir=export_dir)

    # display search overview
    tuner.summary()

    # You can use http://keras-tuner.appspot.com to track results on the web,
    # and get notifications. To do so, grab an API key on that site, and fill
    # it here.
    # tuner.enable_cloud(api_key=api_key)

    # Perform the model search. The search function has the same prototype as
    # keras.Model.fit(); similarly, search_generator() mirrors
    # fit_generator().
    tuner.search(x_train, y_train, validation_data=(x_train, y_train))

    return tuner
Example #29
def get_tuner(which_tuner, input_shape, exp_name: str):
    tuners = {
        'hyperband':
        Hyperband(HyperPhenomenet(input_shape),
                  objective=kerastuner.Objective("val_precision",
                                                 direction="max"),
                  directory='hyperband_' + exp_name,
                  project_name=exp_name,
                  max_epochs=100),
        'random_search':
        RandomSearch(HyperPhenomenet(input_shape),
                     objective=kerastuner.Objective("val_precision",
                                                    direction="max"),
                     directory='keras_tuner_' + exp_name,
                     project_name=exp_name,
                     max_trials=100)
    }
    return tuners[which_tuner]
Example #30
def find_best_NN(x_train, y_train):
  # create a tuner that can find an optimal model architecture
  tuner = RandomSearch(build_model, objective="val_mae", max_trials=40, executions_per_trial=1,)
  print("\n\n\n")
  # start the automatic hyperparameter search
  print('[INFO] start searching')
  tuner.search(x_train, y_train, batch_size=500, epochs=150, validation_split=0.3)
  # select the best model
  print("\n\n\nRESULTS SUMMARY")
  tuner.results_summary()
  print("\n\n\n")
  # get the best model
  print("\n\n\nHERE IS THE BEST MODEL\n\n\n")
  best_params = tuner.get_best_hyperparameters()[0]
  best_model = tuner.hypermodel.build(best_params)
  best_model.summary()
  return best_model