def test_fit_one_shot_model_epoch(self):
    """Run a single one-shot training epoch through HyperKeras without error."""
    searcher = RandomSearcher(self.get_space_simple,
                              optimize_direction=OptimizeDirection.Maximize)
    hyper_model = HyperKeras(searcher,
                             optimizer='adam',
                             loss='sparse_categorical_crossentropy',
                             metrics=['accuracy'],
                             callbacks=[SummaryCallback()],
                             one_shot_mode=True,
                             one_shot_train_sampler=searcher)
    x, y = self.get_x_y_1()
    hyper_model.fit_one_shot_model_epoch(x, y)
def test_build_dataset_iter(self):
    """Verify batch sizing and repeat behaviour of HyperKeras.build_dataset_iter."""
    searcher = RandomSearcher(self.get_space,
                              optimize_direction=OptimizeDirection.Maximize)
    hyper_model = HyperKeras(searcher,
                             optimizer='adam',
                             loss='sparse_categorical_crossentropy',
                             metrics=['accuracy'],
                             callbacks=[SummaryCallback()])
    x, y = self.get_x_y_1()

    # Even split: every batch holds exactly 10 samples.
    batches_seen = 0
    for x_batch, y_batch in hyper_model.build_dataset_iter(x, y, batch_size=10):
        assert len(x_batch) == 10
        assert len(y_batch) == 10
        batches_seen += 1
    assert batches_seen == 10

    # Uneven split: three full batches of 32, then a remainder batch of 4.
    batches_seen = 0
    for x_batch, y_batch in hyper_model.build_dataset_iter(x, y, batch_size=32):
        expected = 32 if batches_seen < 3 else 4
        assert len(x_batch) == expected
        assert len(y_batch) == expected
        batches_seen += 1
    assert batches_seen == 4

    # With repeat_count=2: six full batches of 32, then a remainder batch of 8.
    batches_seen = 0
    for x_batch, y_batch in hyper_model.build_dataset_iter(x, y,
                                                           batch_size=32,
                                                           repeat_count=2):
        expected = 32 if batches_seen < 6 else 8
        assert len(x_batch) == expected
        assert len(y_batch) == expected
        batches_seen += 1
    assert batches_seen == 7
def test_model_with_hp(self):
    """Search the space, retrain the best sample, then predict and evaluate."""
    searcher = RandomSearcher(self.get_space,
                              optimize_direction=OptimizeDirection.Maximize)
    hyper_model = HyperKeras(searcher,
                             optimizer='adam',
                             loss='sparse_categorical_crossentropy',
                             metrics=['accuracy'],
                             callbacks=[SummaryCallback()])
    x, y = self.get_x_y()

    hyper_model.search(x, y, x, y, max_trails=3)
    best = hyper_model.get_best_trail()

    estimator = hyper_model.final_train(best.space_sample, x, y)
    predictions = estimator.predict(x)
    evaluation = estimator.evaluate(x, y)
    assert len(predictions) == 100
    assert evaluation
# model2 = space.keras_model(deepcopy=False) # model2.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy']) # model2.fit(x_train[:samples], y_train[:samples], batch_size=32) # result2 = model.evaluate(x_train[:samples], y_train[:samples]) # # weights_cache = LayerWeightsCache() # space = enas_micro_search_space(arch='NR', hp_dict={}, use_input_placeholder=False, weights_cache=weights_cache) # space.random_sample() # # model = SharingWeightModel(space) # model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy']) # model.fit(x_train[:samples], y_train[:samples], batch_size=32) # result = model.evaluate(x_train[:samples], y_train[:samples]) # # space = enas_micro_search_space(arch='NR', hp_dict={}, use_input_placeholder=False, weights_cache=weights_cache) # space.random_sample() # model.update_search_space(space) # model.fit(x_train[:samples], y_train[:samples], batch_size=100) # result = model.evaluate(x_train[:samples], y_train[:samples]) rs = RandomSearcher( lambda: enas_micro_search_space(arch='NNRNNR', hp_dict={}), optimize_direction='max') hk = HyperKeras(rs, optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'], callbacks=[SummaryCallback()], one_shot_mode=True, visualization=False) # tenserboard = TensorBoard('./tensorboard/run_enas') hk.search(x_train[:samples], y_train[:samples], x_test[:int(samples / 10)], y_test[:int(samples / 10)], max_trails=100, epochs=1, callbacks=[]) assert hk.get