Example #1
# Building with an explicit input tensor should reuse that tensor as the model input.
def test_input_tensor():
    hp = hp_module.HyperParameters()
    inputs = tf.keras.Input(shape=(256, 256, 3))
    hypermodel = efficientnet.HyperEfficientNet(input_tensor=inputs,
                                                classes=10)
    model = hypermodel.build(hp)
    assert model.inputs == [inputs]
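These snippets are not self-contained. A minimal sketch of the imports they appear to rely on, assuming keras-tuner's module layout (the exact import paths are an assumption, not shown in the originals):

import numpy as np
import pytest
import tensorflow as tf

# Assumed keras-tuner layout; adjust to the installed version.
from keras_tuner.applications import efficientnet
from keras_tuner.engine import hypermodel as hm_module
from keras_tuner.engine import hyperparameters as hp_module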
Example #2
# A plain Keras Sequential passed as augmentation_model should appear as the model's second layer.
def test_augmentation_param_fixed_model():
    hp = hp_module.HyperParameters()
    aug_model = tf.keras.Sequential(name="aug")
    hypermodel = efficientnet.HyperEfficientNet(input_shape=(32, 32, 3),
                                                classes=10,
                                                augmentation_model=aug_model)
    model = hypermodel.build(hp)
    assert model.layers[1].name == "aug"
Example #3
# Hyperparameters fixed before build() keep their user-chosen values.
def test_hyperparameter_override():
    hp = hp_module.HyperParameters()
    hp.Choice("version", ["B1"])
    hp.Fixed("top_dropout_rate", 0.5)
    hypermodel = efficientnet.HyperEfficientNet(input_shape=(256, 256, 3),
                                                classes=10)
    hypermodel.build(hp)
    assert hp.get("version") == "B1"
    assert hp.get("top_dropout_rate") == 0.5
Example #4
# build() registers these hyperparameters with the expected default values.
def test_hyperparameter_existence_and_defaults():
    hp = hp_module.HyperParameters()
    hypermodel = efficientnet.HyperEfficientNet(input_shape=(224, 224, 3),
                                                classes=10)
    hypermodel.build(hp)
    assert hp.get("version") == "B0"
    assert hp.get("top_dropout_rate") == 0.2
    assert hp.get("learning_rate") == 0.01
    assert hp.get("pooling") == "avg"
Example #5
# Each EfficientNet version should build into a trainable, 10-class model.
# The "version" argument implies pytest parametrization; the concrete version
# list below is an assumption, not shown in the original snippet.
@pytest.mark.parametrize("version", ["B0", "B1"])
def test_model_construction(version):
    hp = hp_module.HyperParameters()
    hp.Choice("version", [version])
    hypermodel = efficientnet.HyperEfficientNet(input_shape=(32, 32, 3),
                                                classes=10)
    model = hypermodel.build(hp)
    assert hp.values["version"] == version
    assert model.layers
    assert model.name == "EfficientNet"
    assert model.output_shape == (None, 10)
    model.train_on_batch(np.ones((1, 32, 32, 3)), np.ones((1, 10)))
    out = model.predict(np.ones((1, 32, 32, 3)))
    assert out.shape == (1, 10)
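For context, a minimal usage sketch (not part of the original tests) showing how such a hypermodel could be handed to a tuner; the tuner class and arguments assume keras-tuner's public API, and the training data is hypothetical:

import keras_tuner as kt

hypermodel = efficientnet.HyperEfficientNet(input_shape=(32, 32, 3), classes=10)
tuner = kt.RandomSearch(hypermodel, objective="val_accuracy", max_trials=2)
# tuner.search(x_train, y_train, validation_data=(x_val, y_val), epochs=1)  # hypothetical data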
Example #6
# The augmentation model may itself be a HyperModel; its hyperparameters are merged into hp.
def test_augmentation_param_hyper_model():
    class HyperAug(hm_module.HyperModel):
        def build(self, hp):
            model = tf.keras.Sequential(name="aug")
            scaling_factor = hp.Choice("scaling_factor", [1])
            model.add(tf.keras.layers.Lambda(lambda x: x * scaling_factor))
            return model

    hp = hp_module.HyperParameters()
    aug_hm = HyperAug()
    hypermodel = efficientnet.HyperEfficientNet(input_shape=(32, 32, 3),
                                                classes=10,
                                                augmentation_model=aug_hm)
    model = hypermodel.build(hp)
    assert model.layers[1].name == "aug"
    assert hp.values["scaling_factor"] == 1
Example #7
# An invalid augmentation_model (here the integer 0) should raise a ValueError.
def test_augmentation_param_invalid_input():
    with pytest.raises(ValueError):
        efficientnet.HyperEfficientNet(input_shape=(32, 32, 3),
                                       classes=10,
                                       augmentation_model=0)