def convert_optimizer_trial_to_hps(
    hps: hp_module.HyperParameters,
    optimizer_trial: Dict[Text, Any]) -> hp_module.HyperParameters:
  """Converts Optimizer Trial parameters into KerasTuner HyperParameters."""
  hps = hp_module.HyperParameters.from_config(hps.get_config())
  hps.values = {}
  for param in optimizer_trial["parameters"]:
    if "floatValue" in param:
      hps.values[param["parameter"]] = float(param["floatValue"])
    if "intValue" in param:
      hps.values[param["parameter"]] = int(param["intValue"])
    if "stringValue" in param:
      hps.values[param["parameter"]] = str(param["stringValue"])
  return hps
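# A minimal usage sketch for convert_optimizer_trial_to_hps. The trial
# dict below is hypothetical, shaped only by the keys the function reads
# ("parameters", "parameter", "floatValue"/"intValue"/"stringValue"), and
# the hp_module import path assumes a recent KerasTuner.
from keras_tuner.engine import hyperparameters as hp_module

base_hps = hp_module.HyperParameters()
base_hps.Float("learning_rate", 1e-4, 1e-1)

optimizer_trial = {
    "parameters": [
        {"parameter": "learning_rate", "floatValue": 0.001},
        {"parameter": "num_layers", "intValue": 3},
        {"parameter": "activation", "stringValue": "relu"},
    ]
}
hps = convert_optimizer_trial_to_hps(base_hps, optimizer_trial)
print(hps.values)
# -> {'learning_rate': 0.001, 'num_layers': 3, 'activation': 'relu'}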
def convert_vizier_trial_to_hps(
    hps: hp_module.HyperParameters,
    vizier_trial: Dict[Text, Any]) -> hp_module.HyperParameters:
  """Converts Vizier Trial parameters into KerasTuner HyperParameters.

  Args:
    hps: Sample KerasTuner HyperParameters object for config initialization.
    vizier_trial: A Vizier Trial instance.

  Returns:
    A KerasTuner HyperParameters object that holds the Vizier Trial
    parameters.
  """
  hps = hp_module.HyperParameters.from_config(hps.get_config())
  hps.values = convert_vizier_trial_to_dict(vizier_trial)
  return hps
def _verify_output(self):
  # Test best hparams.
  best_hparams_path = os.path.join(self._best_hparams.uri,
                                   'best_hyperparameters.txt')
  self.assertTrue(tf.io.gfile.exists(best_hparams_path))
  with tf.io.gfile.GFile(best_hparams_path, mode='r') as f:
    best_hparams_config = json.loads(f.read())
  best_hparams = HyperParameters.from_config(best_hparams_config)
  self.assertBetween(best_hparams.get('learning_rate'), 1e-4, 1.01)
  self.assertBetween(best_hparams.get('num_layers'), 1, 5)
def convert_hyperparams_to_hparams(
    hyperparams: hp_module.HyperParameters
) -> Dict[hparams_api.HParam, Any]:
  """Converts KerasTuner HyperParameters to TensorBoard HParams.

  Args:
    hyperparams: A KerasTuner HyperParameters instance.

  Returns:
    A dict that maps TensorBoard HParams to current values.
  """
  hparams = {}
  for hp in hyperparams.space:
    hparams_value = {}
    try:
      hparams_value = hyperparams.get(hp.name)
    except ValueError:
      continue

    hparams_domain = {}
    if isinstance(hp, hp_module.Choice):
      hparams_domain = hparams_api.Discrete(hp.values)
    elif isinstance(hp, hp_module.Int):
      if hp.step is None or hp.step == 1:
        hparams_domain = hparams_api.IntInterval(hp.min_value, hp.max_value)
      else:
        # Note: `hp.max_value` is inclusive, unlike the end index of Python
        # `range()`, which is exclusive.
        values = list(range(hp.min_value, hp.max_value + 1, hp.step))
        hparams_domain = hparams_api.Discrete(values)
    elif isinstance(hp, hp_module.Float):
      if hp.step is None:
        hparams_domain = hparams_api.RealInterval(hp.min_value, hp.max_value)
      else:
        # Note: `hp.max_value` is inclusive, which is also the default for
        # NumPy's `linspace`.
        num_samples = int((hp.max_value - hp.min_value) / hp.step)
        end_value = hp.min_value + (num_samples * hp.step)
        values = np.linspace(hp.min_value, end_value,
                             num_samples + 1).tolist()
        hparams_domain = hparams_api.Discrete(values)
    elif isinstance(hp, hp_module.Boolean):
      hparams_domain = hparams_api.Discrete([True, False])
    elif isinstance(hp, hp_module.Fixed):
      hparams_domain = hparams_api.Discrete([hp.value])
    else:
      raise ValueError("`HyperParameter` type not recognized: {}".format(hp))

    hparams_key = hparams_api.HParam(hp.name, hparams_domain)
    hparams[hparams_key] = hparams_value

  return hparams
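# Sketch: handing the converted mapping to TensorBoard's hparams plugin.
# Assumes the aliases used above (hp_module for KerasTuner's
# hyperparameters module, hparams_api for tensorboard.plugins.hparams.api);
# the log directory and hyperparameter names are illustrative.
import tensorflow as tf

hps = hp_module.HyperParameters()
hps.Int("num_layers", 1, 5)
hps.Choice("activation", ["relu", "tanh"])

hparams = convert_hyperparams_to_hparams(hps)
with tf.summary.create_file_writer("logs/hparams_demo").as_default():
    hparams_api.hparams(hparams)  # records this trial's current values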
def pretrain(self, inputs, outputs, iteration):
    """
    Optimize the hyperparameters of the ANN.

    Args:
        inputs (np.array): All input data.
        outputs (np.array): All output data.
        iteration (int): Iteration number.

    Returns:
        best_hp (kerastuner.engine.hyperparameters.HyperParameters):
            Optimal hyperparameters.

    Notes:
        Optimization objective fixed on val_loss.
    """
    print("### Performing Keras Tuner optimization of the ANN ###")

    # Select hyperparameters to tune
    hp = HyperParameters()
    valid_entries = ["activation", "neurons", "layers", "learning_rate",
                     "regularization", "sparsity"]
    if not all(entry in valid_entries for entry in self["optimize"]):
        raise Exception("Invalid hyperparameters specified for optimization")
    if "activation" in self["optimize"]:
        hp.Choice("activation_function", ["sigmoid", "relu", "swish", "tanh"],
                  default=self["activation"])
    if "neurons" in self["optimize"]:
        hp.Int("no_neurons", 3, 20, sampling="log",
               default=self["no_neurons"])
    if "layers" in self["optimize"]:
        hp.Int("no_hid_layers", 1, 6, default=self["no_layers"])
    if "learning_rate" in self["optimize"]:
        hp.Float("learning_rate", 0.001, 0.1, sampling="log",
                 default=self["learning_rate"])
    if "regularization" in self["optimize"]:
        hp.Float("regularization", 0.0001, 0.01, sampling="log",
                 default=self["kernel_regularizer"])
    if "sparsity" in self["optimize"]:
        hp.Float("sparsity", 0.3, 0.9, default=self["sparsity"])
    no_hps = len(hp._space)

    # In case none are chosen, only 1 run for the fixed setting
    if no_hps == 0:
        hp.Fixed("no_neurons", self["no_neurons"])

    # Load tuner
    max_trials = self["max_trials"] * no_hps
    path_tf_format = "logs"
    time = datetime.now().strftime("%Y%m%d_%H%M")
    tuner_args = {"objective": "val_loss", "hyperparameters": hp,
                  "max_trials": max_trials,
                  "executions_per_trial": self["executions_per_trial"],
                  "directory": path_tf_format, "overwrite": True,
                  "tune_new_entries": False, "project_name": "opt"}
    ##            "overwrite": True, "tune_new_entries": False,
    ##            "project_name": f"opt_{time}"}
    if self["tuner"] == "random" or no_hps == 0:
        tuner = RandomSearchCV(self.build_hypermodel, **tuner_args)
    elif self["tuner"] == "bayesian":
        tuner = BayesianOptimizationCV(self.build_hypermodel,
                                       num_initial_points=3 * no_hps,
                                       **tuner_args)

    # Load callbacks and remove early stopping
    callbacks_all = self.get_callbacks()
    callbacks = [call for call in callbacks_all
                 if not isinstance(call, keras.callbacks.EarlyStopping)]

    # Optimize
    tuner.search(inputs, outputs, epochs=self["tuner_epochs"], verbose=0,
                 shuffle=True, callbacks=callbacks, iteration_no=iteration)

    # Retrieve and save best model
    best_hp = tuner.get_best_hyperparameters()[0]
    self.write_stats([best_hp.values], "ann_best_models")

    # Make a table of tuner stats
    scores = [tuner.oracle.trials[trial].score
              for trial in tuner.oracle.trials]
    hps = [tuner.oracle.trials[trial].hyperparameters.values
           for trial in tuner.oracle.trials]
    for idx, entry in enumerate(hps):
        entry["score"] = scores[idx]
    self.write_stats(hps, "ann_tuner_stats")

    return best_hp
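# Sketch of how a caller might consume the returned best_hp. The
# `surrogate` object and the data arrays are hypothetical stand-ins for
# whatever instantiates the class this method belongs to.
best_hp = surrogate.pretrain(inputs, outputs, iteration=0)
print(best_hp.values)  # e.g. {"no_neurons": 12, "learning_rate": 0.01}
model = surrogate.build_hypermodel(best_hp)  # rebuild the winning network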
tuner = RandomSearch(
    MyHyperModel(img_size=(28, 28), classes=10),
    objective="val_accuracy",
    max_trials=5,
    directory="test_dir",
    project_name="case3",
)
tuner.search(x, y=y, epochs=5, validation_data=(val_x, val_y))

# """Case #4:
# - We restrict the search space
# - This means that default values are being used for params that are left out
#   (see the build_model sketch after this snippet)
# """
hp = HyperParameters()
hp.Choice("learning_rate", [1e-1, 1e-3])
tuner = RandomSearch(
    build_model,
    max_trials=5,
    hyperparameters=hp,
    tune_new_entries=False,
    objective="val_accuracy",
    directory="test_dir",
    project_name="case4",
)
tuner.search(x=x, y=y, epochs=5, validation_data=(val_x, val_y))

# """Case #5:
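# A minimal sketch of a build_model compatible with Case #4 above. Because
# the tuner is created with tune_new_entries=False, only `learning_rate`
# (declared in the restricted space) is searched; `units` below falls back
# to its default on every trial. The architecture and names here are
# illustrative, not the original example's model.
import keras
from keras import layers


def build_model(hp):
    model = keras.Sequential([
        layers.Flatten(),
        layers.Dense(hp.Int("units", 32, 128, default=64),
                     activation="relu"),
        layers.Dense(10, activation="softmax"),
    ])
    model.compile(
        optimizer=keras.optimizers.Adam(
            learning_rate=hp.Choice("learning_rate", [1e-1, 1e-3])),
        loss="sparse_categorical_crossentropy",
        metrics=["accuracy"],
    )
    return model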
    batch_size=16,
    shuffle=True,
    seed=7,
)
datagen_valid = datagen.flow_from_directory(
    "archive/val",
    color_mode="rgb",
    class_mode="categorical",
    target_size=(200, 200),
    batch_size=16,
    shuffle=True,
    seed=7,
)

hp = HyperParameters()


def build_model(hp):
    """
    Build the model.
    :return: the model
    """
    image_input = Input(shape=(200, 200, 3))
    layer1_1 = Conv2D(hp.Choice('num_filters_layer1_1',
                                values=[16, 32, 64],
                                default=16),
                      (1, 1), activation='relu')(image_input)
    # TensorShape([None, 200, 200, 64])
    layer2_1 = Conv2D(hp.Choice('num_filters_layer2_1',
    MyHyperModel(img_size=(28, 28), num_classes=10),
    objective='val_accuracy',
    max_trials=5,
    directory='test_dir')
tuner.search(x, y=y, epochs=5, validation_data=(val_x, val_y))

# """Case #4:
# - We restrict the search space
# - This means that default values are being used for params that are left out
# """
hp = HyperParameters()
hp.Choice('learning_rate', [1e-1, 1e-3])
tuner = RandomSearch(
    build_model,
    max_trials=5,
    hyperparameters=hp,
    tune_new_entries=False,
    objective='val_accuracy')
tuner.search(x=x, y=y, epochs=5, validation_data=(val_x, val_y))

# """Case #5: