Example #1
tuner = RandomSearch(  # opening reconstructed (tuner class assumed; the snippet begins mid-call)
    MyHyperModel(img_size=(28, 28), classes=10),
    objective="val_accuracy",
    max_trials=5,
    directory="test_dir",
    project_name="case3",
)

tuner.search(x, y=y, epochs=5, validation_data=(val_x, val_y))

# """Case #4:
# - We restrict the search space
# - This means that default values are being used for params that are left out
# """

hp = HyperParameters()
hp.Choice("learning_rate", [1e-1, 1e-3])

tuner = RandomSearch(
    build_model,
    max_trials=5,
    hyperparameters=hp,
    tune_new_entries=False,
    objective="val_accuracy",
    directory="test_dir",
    project_name="case4",
)

tuner.search(x=x, y=y, epochs=5, validation_data=(val_x, val_y))
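
# After the search finishes, results can be retrieved through the standard
# Keras Tuner API (not part of the original snippet):
best_hp = tuner.get_best_hyperparameters(num_trials=1)[0]
print(best_hp.values)
best_model = tuner.get_best_models(num_models=1)[0]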

# """Case #5:
# - We override specific parameters with fixed values that aren't the default
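
# The original snippet is cut off here. Below is a minimal sketch of the usual
# Case #5 pattern (assumed, not taken from the original source): hp.Fixed pins
# a hyperparameter to a non-default value, while tune_new_entries=True keeps
# every other hyperparameter searchable. The value 1e-4 and the project name
# are illustrative only.

hp = HyperParameters()
hp.Fixed("learning_rate", value=1e-4)  # illustrative non-default value

tuner = RandomSearch(
    build_model,
    max_trials=5,
    hyperparameters=hp,
    tune_new_entries=True,  # everything not fixed above is still tuned
    objective="val_accuracy",
    directory="test_dir",
    project_name="case5",
)

tuner.search(x=x, y=y, epochs=5, validation_data=(val_x, val_y))
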
Example #2
    def pretrain(self, inputs, outputs, iteration):
        """
        Optimize the hyperparameters of the ANN.

        Args:
            inputs (np.array): All input data.
            outputs (np.array): All output data.
            iteration (int): Iteration number.

        Returns:
            best_hp (kerastuner.engine.hyperparameters.HyperParameters): Optimal hyperparameters.
        
        Notes:
            Optimization objective fixed on val_loss.
        """
        print("### Performing Keras Tuner optimization of the ANN###")
        # Select hyperparameters to tune
        hp = HyperParameters()
        valid_entries = ["activation", "neurons", "layers", "learning_rate", "regularization", "sparsity"]
        if not all(entry in valid_entries for entry in self["optimize"]):
            raise ValueError("Invalid hyperparameters specified for optimization")
        
        if "activation" in self["optimize"]:
            hp.Choice("activation_function",["sigmoid","relu","swish","tanh"],default=self["activation"])
        if "neurons" in self["optimize"]:
            hp.Int("no_neurons",3,20,sampling="log",default=self["no_neurons"])
        if "layers" in self["optimize"]:
            hp.Int("no_hid_layers",1,6,default=self["no_layers"])
        if "learning_rate" in self["optimize"]:
            hp.Float("learning_rate",0.001,0.1,sampling="log",default=self["learning_rate"])
        if "regularization" in self["optimize"]:
            hp.Float("regularization",0.0001,0.01,sampling="log",default=self["kernel_regularizer"])
        if "sparsity" in self["optimize"]:
            hp.Float("sparsity",0.3,0.9,default=self["sparsity"])

        no_hps = len(hp.space)  # number of hyperparameters registered for tuning

        # If no hyperparameters were selected, do a single run with the fixed settings
        if no_hps == 0:
            hp.Fixed("no_neurons", self["no_neurons"])
        
        # Set up the tuner
        max_trials = max(self["max_trials"] * no_hps, 1)  # ensure at least one trial when no_hps == 0
        path_tf_format = "logs"

        time = datetime.now().strftime("%Y%m%d_%H%M")

        tuner_args = {"objective": "val_loss", "hyperparameters": hp, "max_trials": max_trials,
                      "executions_per_trial": self["executions_per_trial"], "directory": path_tf_format,
                      "overwrite": True, "tune_new_entries": False, "project_name": "opt"}
##                      "overwrite": True, "tune_new_entries": False, "project_name": f"opt_{time}"}

        if self["tuner"] == "random" or no_hps==0:
            tuner = RandomSearchCV(self.build_hypermodel,**tuner_args)            
        elif self["tuner"] == "bayesian":
            tuner = BayesianOptimizationCV(self.build_hypermodel,num_initial_points=3*no_hps,**tuner_args)

        # Load callbacks and remove early stopping
        callbacks_all = self.get_callbacks()
        callbacks = [call for call in callbacks_all if not isinstance(call, keras.callbacks.EarlyStopping)]

        # Optimize
        tuner.search(inputs, outputs, epochs=self["tuner_epochs"], verbose=0, shuffle=True,
                     callbacks=callbacks, iteration_no=iteration)

        # Retrieve and save best model
        best_hp = tuner.get_best_hyperparameters()[0]
        self.write_stats([best_hp.values], "ann_best_models")

        # Make a table of tuner stats
        scores = [trial.score for trial in tuner.oracle.trials.values()]
        hps = [trial.hyperparameters.values for trial in tuner.oracle.trials.values()]
        for idx, entry in enumerate(hps):
            entry["score"] = scores[idx]
        self.write_stats(hps, "ann_tuner_stats")

        return best_hp
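
    # A minimal sketch of what the build_hypermodel method referenced above might
    # look like (assumed; the real implementation is not part of this snippet, so
    # the architecture, loss, and optimizer below are illustrative). Each
    # hyperparameter is declared with the current setting as its default; since
    # the tuner is created with tune_new_entries=False, only the entries
    # registered in pretrain() are actually searched.
    def build_hypermodel(self, hp):
        activation = hp.Choice("activation_function", ["sigmoid", "relu", "swish", "tanh"],
                               default=self["activation"])
        no_neurons = hp.Int("no_neurons", 3, 20, sampling="log", default=self["no_neurons"])
        no_hid_layers = hp.Int("no_hid_layers", 1, 6, default=self["no_layers"])
        learning_rate = hp.Float("learning_rate", 0.001, 0.1, sampling="log",
                                 default=self["learning_rate"])
        regularization = hp.Float("regularization", 0.0001, 0.01, sampling="log",
                                  default=self["kernel_regularizer"])

        model = keras.Sequential()
        for _ in range(no_hid_layers):
            model.add(keras.layers.Dense(no_neurons, activation=activation,
                                         kernel_regularizer=keras.regularizers.l2(regularization)))
        model.add(keras.layers.Dense(1))
        model.compile(optimizer=keras.optimizers.Adam(learning_rate=learning_rate), loss="mse")
        return model
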
Example #3
# (Opening reconstructed: the snippet begins mid-call, and the original first
# argument is cut off; build_model is used here as a placeholder.)
tuner = RandomSearch(
    build_model,
    objective='val_accuracy',
    max_trials=5,
    directory='test_dir')

tuner.search(x,
             y=y,
             epochs=5,
             validation_data=(val_x, val_y))

# """Case #4:
# - We restrict the search space
# - This means that default values are being used for params that are left out
# """

hp = HyperParameters()
hp.Choice('learning_rate', [1e-1, 1e-3])

tuner = RandomSearch(
    build_model,
    max_trials=5,
    hyperparameters=hp,
    tune_new_entries=False,
    objective='val_accuracy')

tuner.search(x=x,
             y=y,
             epochs=5,
             validation_data=(val_x, val_y))

# """Case #5:
# - We override specific parameters with fixed values that aren't the default