# Example 1
def default_hparams(input_shape, num_classes):
    """Build the default hyper-parameter search space for the model.

    Args:
        input_shape: Shape of the model input. Currently unused here;
            kept for interface compatibility with callers.
        num_classes: Number of output classes. Currently unused here.

    Returns:
        dict: Mapping of hyper-parameter names to tunable distributions
        (``Range`` / ``Choice`` / ``Linear``) or concrete values, grouped
        by the section of the network they configure.
    """
    hp = {}

    # [general]
    kernel_size = Range("kernel_size", 3, 5, 2, group="general")

    hp["kernel_size"] = (kernel_size, kernel_size)
    hp["initial_strides"] = (2, 2)
    hp["activation"] = Choice("activation", ["relu", "selu"], group="general")
    hp["learning_rate"] = Choice("learning_rate", [.001, .0001, .00001],
                                 group="general")

    # [entry flow]

    # -conv2d
    hp["conv2d_num_filters"] = Choice("num_filters", [32, 64, 128],
                                      group="conv2d")

    # separable block > not an exact match to the paper
    hp["sep_num_filters"] = Range("num_filters",
                                  128,
                                  768,
                                  128,
                                  group="entry_flow")

    # [Middle Flow]
    hp["num_residual_blocks"] = Range("num_residual_blocks",
                                      2,
                                      8,
                                      group="middle_flow")

    # [Exit Flow]
    hp["dense_merge_type"] = Choice("merge_type", ["avg", "flatten", "max"],
                                    group="exit_flow")

    hp["num_dense_layers"] = Range("dense_layers", 1, 3, group="exit_flow")

    hp["dropout_rate"] = Linear("dropout",
                                start=0.0,
                                stop=0.5,
                                num_buckets=6,
                                precision=1,
                                group="exit_flow")
    # Pass group as a keyword for consistency with every other
    # declaration above (it was positional in the original, which is
    # fragile if Choice's signature gains an intermediate parameter).
    hp["dense_use_bn"] = Choice("batch_normalization", [True, False],
                                group="exit_flow")
    return hp
def model_fn():
    """Build a tunable binary-classification model.

    Hyper-parameters are declared as ordinary Python variables and then
    used in place of static values in the model definition.
    """
    # Tunable values, grouped by the part of the model they configure.
    hidden_dims = Range('dims', 2, 4, 2, group='layers')
    hidden_activation = Choice('activation', ['relu', 'tanh'], group="layers")
    use_extra_layer = Boolean('extra_layer', group="layers")
    learning_rate = Choice('lr', [0.01, 0.001, 0.0001], group="optimizer")

    # Assemble the network; the tuner substitutes concrete values for
    # each hyper-parameter variable at trial time.
    model = Sequential()
    model.add(Dense(hidden_dims, input_shape=(1, )))
    model.add(Dense(hidden_dims, activation=hidden_activation))
    if use_extra_layer:
        model.add(Dense(hidden_dims, activation=hidden_activation))
    model.add(Dense(1, activation='sigmoid'))

    model.compile(optimizer=Adam(learning_rate),
                  loss="binary_crossentropy",
                  metrics=['acc'])
    return model
def test_range_incr_larger_than_range():
    """An increment larger than the whole span must be rejected."""
    # Callable form of pytest.raises: asserts Range(...) raises ValueError.
    pytest.raises(ValueError, Range,
                  'test_range_incr_larger_than_range', 1, 3, 10)
def test_range_stop_larger_than_start():
    """A stop value below the start value must be rejected."""
    # Callable form of pytest.raises: asserts Range(...) raises ValueError.
    pytest.raises(ValueError, Range,
                  'test_range_stop_larger_than_start', 3, 1)
def test_range_invalid_increment():
    """A non-numeric increment must be rejected."""
    # Callable form of pytest.raises: asserts Range(...) raises ValueError.
    pytest.raises(ValueError, Range,
                  'test_range_invalid_increment', 1, 3, 'a')
def test_range_invalid_stop():
    """A non-numeric stop value must be rejected."""
    # Callable form of pytest.raises: asserts Range(...) raises ValueError.
    pytest.raises(ValueError, Range,
                  'test_range_invalid_stop', 1, 'a', 4)
def test_range_invalid_start():
    """A non-numeric start value must be rejected."""
    # Callable form of pytest.raises: asserts Range(...) raises ValueError.
    pytest.raises(ValueError, Range,
                  'test_range_invalid_start', 'a', 3, 4)
def test_range():
    """A freshly created Range compares equal to its start value."""
    value = Range('test_range', 7, 20, 5)
    assert value == 7