def _test_base_tuner():
    """Worker-side check that a SimpleTuner runs against a chief oracle.

    Only the worker process executes this body; the chief process blocks
    serving the oracle until its thread is stopped.
    """

    def build_model(hp):
        # The "model" here is simply the sampled hyperparameter value.
        return hp.Int("a", 1, 100)

    search_oracle = kt.oracles.RandomSearch(
        objective=kt.Objective("score", "max"),
        max_trials=10,
    )
    tuner = SimpleTuner(
        oracle=search_oracle,
        hypermodel=build_model,
        directory=tmp_dir,
    )
    tuner.search()

    # Only worker makes it to this point, server runs until thread stops.
    assert dist_utils.has_chief_oracle()
    assert not dist_utils.is_chief_oracle()
    assert isinstance(tuner.oracle, kt.distribute.oracle_client.OracleClient)

    barrier.wait(60)

    # Each "model" is just its score; best-first means descending order.
    best = tuner.get_best_models(10)
    assert len(best)
    assert best == sorted(copy.copy(best), reverse=True)
def __init__(
    self,
    oracle,
    hypermodel=None,
    directory=None,
    project_name=None,
    logger=None,
    overwrite=False,
):
    """Initialize the tuner, wiring up the oracle and (optionally) distribution.

    Args:
        oracle: An `Oracle` instance that drives the search.
        hypermodel: Optional hypermodel (instance or model-building callable).
        directory: Root directory for tuner state; defaults to ".".
        project_name: Subdirectory name for this project; defaults to
            "untitled_project".
        logger: Optional external logger for search events.
        overwrite: If True, wipe any existing project state instead of
            reloading it.

    Raises:
        ValueError: If `oracle` is not an `Oracle` instance.
    """
    # Ops and metadata
    self.directory = directory or "."
    self.project_name = project_name or "untitled_project"
    if overwrite and tf.io.gfile.exists(self.project_dir):
        tf.io.gfile.rmtree(self.project_dir)

    if not isinstance(oracle, oracle_module.Oracle):
        raise ValueError(
            "Expected `oracle` argument to be an instance of `Oracle`. "
            # Fixed: original message had an unbalanced "(" before the type.
            f"Received: oracle={oracle} (of type {type(oracle)}).")
    self.oracle = oracle
    self.oracle._set_project_dir(self.directory,
                                 self.project_name,
                                 overwrite=overwrite)

    # Run in distributed mode.
    if dist_utils.is_chief_oracle():
        # Blocks forever: the chief process only serves oracle requests.
        oracle_chief.start_server(self.oracle)
    elif dist_utils.has_chief_oracle():
        # Proxies requests to the chief oracle.
        self.oracle = oracle_client.OracleClient(self.oracle)

    # To support tuning distribution.
    self.tuner_id = os.environ.get("KERASTUNER_TUNER_ID", "tuner0")

    self.hypermodel = hm_module.get_hypermodel(hypermodel)

    # Logs etc
    self.logger = logger
    self._display = tuner_utils.Display(oracle=self.oracle)

    # Samples the hyperparameter space once so the oracle knows its shape.
    self._populate_initial_space()

    # Resume from a prior run unless the caller asked to start fresh.
    if not overwrite and tf.io.gfile.exists(self._get_tuner_fname()):
        tf.get_logger().info("Reloading Tuner from {}".format(
            self._get_tuner_fname()))
        self.reload()
def _test_random_search():
    """Worker-side end-to-end run of `kt.tuners.RandomSearch` with a chief oracle."""

    def build_model(hp):
        # Tiny tunable MLP: 1-3 hidden layers, each with 1-3 units.
        model = keras.Sequential()
        model.add(keras.layers.Dense(3, input_shape=(5,)))
        for layer_idx in range(hp.Int("num_layers", 1, 3)):
            units = hp.Int("num_units_%i" % layer_idx, 1, 3)
            model.add(keras.layers.Dense(units, activation="relu"))
        model.add(keras.layers.Dense(1, activation="sigmoid"))
        model.compile("sgd", "binary_crossentropy")
        return model

    x = np.random.uniform(-1, 1, size=(2, 5))
    y = np.ones((2, 1))

    tuner = kt.tuners.RandomSearch(
        hypermodel=build_model,
        objective="val_loss",
        max_trials=10,
        directory=tmp_dir,
    )

    # Only the worker reaches this point; the chief serves the oracle
    # until its thread is stopped.
    assert dist_utils.has_chief_oracle()
    assert not dist_utils.is_chief_oracle()
    assert isinstance(tuner.oracle, kt.distribute.oracle_client.OracleClient)

    tuner.search(x, y, validation_data=(x, y), epochs=1, batch_size=2)

    # Suppress warnings about optimizer state not being restored by tf.keras.
    tf.get_logger().setLevel(logging.ERROR)

    # val_loss is minimized, so best trials come back in ascending score order.
    top_trials = tuner.oracle.get_best_trials(2)
    assert top_trials[0].score <= top_trials[1].score

    top_models = tuner.get_best_models(2)
    assert top_models[0].evaluate(x, y) <= top_models[1].evaluate(x, y)