Example #1 (score: 0)
    def test_enas_micro(self):
        """Sanity-check the ENAS micro search space for arch 'NNRNNR'.

        Building the space should register exactly 32 hyperparameters in the
        caller-supplied dict, expose a non-empty set of combinations, and a
        randomly sampled architecture should compile into a Keras model that
        can be rendered with plot_model.
        """
        collected_hps = {}
        search_space = enas_micro_search_space(arch='NNRNNR', hp_dict=collected_hps)
        # The space constructor populates collected_hps as a side effect.
        assert len(collected_hps.items()) == 32
        assert search_space.combinations

        search_space.random_sample()
        sampled_model = search_space.keras_model()
        plot_model(sampled_model,
                   show_shapes=True,
                   to_file=f'{test_output_dir}/test_enas_micro.png')
Example #2 (score: 0)
 def enas_space_fn():
     """Return a fresh ENAS micro search space for the 'NNRNNR' architecture.

     A new hp_dict is created on every call so that successive spaces never
     share hyperparameter state.
     """
     return enas_micro_search_space(arch='NNRNNR', hp_dict={})
Example #3 (score: 0)
# ENAS one-shot architecture search on MNIST with hyperkeras.
#
# Fix: the original snippet used `tf`, `np`, and `SummaryCallback` without
# importing them; the missing imports are added below.
import numpy as np
import tensorflow as tf

from hypernets.core.callbacks import SummaryCallback
from hypernets.searchers.mcts_searcher import MCTSSearcher
from hypernets.searchers.random_searcher import RandomSearcher

from hyperkeras.search_space.enas_micro import enas_micro_search_space
from hyperkeras.one_shot_model import OneShotModel


def _enas_space():
    """Fresh ENAS micro search space ('NNRNNR'); new hp_dict per call avoids shared state."""
    return enas_micro_search_space(arch='NNRNNR', hp_dict={})


(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
# Rescale the images from [0,255] to the [0.0,1.0] range and add a trailing channel axis.
x_train, x_test = x_train[..., np.newaxis] / 255.0, x_test[..., np.newaxis] / 255.0
y_train = tf.keras.utils.to_categorical(y_train)
y_test = tf.keras.utils.to_categorical(y_test)
print("Number of original training examples:", len(x_train))
print("Number of original test examples:", len(x_test))
# sample for speed up
samples = 100

# MCTS drives the architecture search; a uniform random sampler picks the
# single path trained at each one-shot step.
searcher = MCTSSearcher(_enas_space, optimize_direction='max')
one_shot_sampler = RandomSearcher(_enas_space, optimize_direction='max')

model = OneShotModel(searcher,
                     optimizer='adam',
                     loss='categorical_crossentropy',
                     metrics=['accuracy'],
                     epochs=3,
                     batch_size=64,
                     controller_train_per_epoch=False,  # Single path
                     callbacks=[SummaryCallback()],
                     one_shot_train_sampler=one_shot_sampler,  # uniform sampler
                     visualization=False)

# To speed things up, subsample as in:
#   model.search(x_train[:samples], y_train[:samples],
#                x_test[:int(samples / 10)], y_test[:int(samples / 10)], ...)
# NOTE(review): `max_trails` matches the hypernets API spelling used here —
# confirm against the installed hypernets version (later releases renamed it).
model.search(x_train, y_train, x_test, y_test, max_trails=1000, epochs=100, callbacks=[])
Example #4 (score: 0)
# model2 = space.keras_model(deepcopy=False)
# model2.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
# model2.fit(x_train[:samples], y_train[:samples], batch_size=32)
# result2 = model.evaluate(x_train[:samples], y_train[:samples])
#
# weights_cache = LayerWeightsCache()
# space = enas_micro_search_space(arch='NR', hp_dict={}, use_input_placeholder=False, weights_cache=weights_cache)
# space.random_sample()
#
# model = SharingWeightModel(space)
# model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
# model.fit(x_train[:samples], y_train[:samples], batch_size=32)
# result = model.evaluate(x_train[:samples], y_train[:samples])
#
# space = enas_micro_search_space(arch='NR', hp_dict={}, use_input_placeholder=False, weights_cache=weights_cache)
# space.random_sample()
# model.update_search_space(space)
# model.fit(x_train[:samples], y_train[:samples], batch_size=100)
# result = model.evaluate(x_train[:samples], y_train[:samples])

# Random search over the ENAS micro space using HyperKeras in one-shot mode.
# NOTE(review): `HyperKeras` and `SummaryCallback` are not imported anywhere in
# this excerpt — presumably imported in the omitted part of the file; confirm.
rs = RandomSearcher(
    lambda: enas_micro_search_space(arch='NNRNNR', hp_dict={}),
    optimize_direction='max')
hk = HyperKeras(rs, optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'],
                callbacks=[SummaryCallback()], one_shot_mode=True, visualization=False)

# tenserboard = TensorBoard('./tensorboard/run_enas')
# Search on a small subsample (`samples` examples, a tenth of that for eval)
# so the demo finishes quickly; one training epoch per trial.
hk.search(x_train[:samples], y_train[:samples], x_test[:int(samples / 10)], y_test[:int(samples / 10)],
          max_trails=100, epochs=1, callbacks=[])
# NOTE(review): the next statement appears truncated in this excerpt (likely
# something like `assert hk.get_best_trail() ...`); as written it evaluates the
# bound method object `hk.get`, which is always truthy.
assert hk.get