def test_model_with_hp(self):
    """End-to-end smoke test of a hyper-parameter search.

    Runs a short random search over ``self.get_space``, retrains the best
    sampled space, then sanity-checks that the final estimator can predict
    and evaluate on the same data.

    NOTE(review): ``max_trails`` / ``get_best_trail`` are the framework's
    own (misspelled) API names — kept as-is on purpose.
    """
    searcher = RandomSearcher(self.get_space,
                              optimize_direction=OptimizeDirection.Maximize)
    hyper_model = HyperKeras(searcher,
                             optimizer='adam',
                             loss='sparse_categorical_crossentropy',
                             metrics=['accuracy'],
                             callbacks=[SummaryCallback()])

    x, y = self.get_x_y()
    # The fixture reuses the training set as the validation set.
    hyper_model.search(x, y, x, y, max_trails=3)

    best_trial = hyper_model.get_best_trail()
    estimator = hyper_model.final_train(best_trial.space_sample, x, y)

    predictions = estimator.predict(x)
    eval_result = estimator.evaluate(x, y)
    # The fixture data has 100 samples; evaluate() must return something truthy.
    assert len(predictions) == 100
    assert eval_result
# model2 = space.keras_model(deepcopy=False) # model2.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy']) # model2.fit(x_train[:samples], y_train[:samples], batch_size=32) # result2 = model.evaluate(x_train[:samples], y_train[:samples]) # # weights_cache = LayerWeightsCache() # space = enas_micro_search_space(arch='NR', hp_dict={}, use_input_placeholder=False, weights_cache=weights_cache) # space.random_sample() # # model = SharingWeightModel(space) # model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy']) # model.fit(x_train[:samples], y_train[:samples], batch_size=32) # result = model.evaluate(x_train[:samples], y_train[:samples]) # # space = enas_micro_search_space(arch='NR', hp_dict={}, use_input_placeholder=False, weights_cache=weights_cache) # space.random_sample() # model.update_search_space(space) # model.fit(x_train[:samples], y_train[:samples], batch_size=100) # result = model.evaluate(x_train[:samples], y_train[:samples]) rs = RandomSearcher( lambda: enas_micro_search_space(arch='NNRNNR', hp_dict={}), optimize_direction='max') hk = HyperKeras(rs, optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'], callbacks=[SummaryCallback()], one_shot_mode=True, visualization=False) # tenserboard = TensorBoard('./tensorboard/run_enas') hk.search(x_train[:samples], y_train[:samples], x_test[:int(samples / 10)], y_test[:int(samples / 10)], max_trails=100, epochs=1, callbacks=[]) assert hk.get