from keras_tuner import HyperParameters, RandomSearch, BayesianOptimization

# `misc`, `PARAMS` and `get_Lemaire_MTL_model` are project-local helpers.
def get_tuner(opDir, method, max_trials):
    hp = HyperParameters()
    hp.Int('n_sp_hidden_lyrs', min_value=1, max_value=3, step=1)
    hp.Choice('sp_hidden_nodes', [16, 32, 64, 128])
    hp.Int('n_mu_hidden_lyrs', min_value=1, max_value=3, step=1)
    hp.Choice('mu_hidden_nodes', [16, 32, 64, 128])
    hp.Int('n_smr_hidden_lyrs', min_value=1, max_value=3, step=1)
    hp.Choice('smr_hidden_nodes', [16, 32, 64, 128])
    hp.Choice('classification_loss_sp', ['binary_crossentropy', 'hinge'])
    hp.Choice('classification_loss_mu', ['binary_crossentropy', 'hinge'])

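    # Disabled sweep over (alpha, beta, gamma) loss-weight triples:
    # hp.Choice only accepts scalar int/float/str/bool values, so a
    # list-valued 'loss_weights' choice would not work as written.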
    # loss_weights = []
    # alpha = np.arange(0.1,0.8,0.1)
    # beta = 1-alpha
    # for i in range(len(beta)):
    #     gamma = np.arange(0.1, beta[i]-0.1, 0.1)
    #     for j in range(len(gamma)):
    #         beta_i = beta[i] - gamma[j]
    #         loss_weights.append([alpha[i], beta_i, gamma[j]])
    # loss_weights = np.round(loss_weights,1).tolist()
    # hp.Choice('loss_weights', loss_weights)
    hp.Fixed('TR_STEPS', PARAMS['TR_STEPS'])

    misc.print_model_summary(opDir + '/model_summary.txt',
                             get_Lemaire_MTL_model(hp))

    if method == 'RandomSearch':
        tuner = RandomSearch(
            get_Lemaire_MTL_model,
            hyperparameters=hp,
            objective='val_loss',
            max_trials=max_trials,
            executions_per_trial=2,
            overwrite=False,
            directory=opDir,
            project_name='B3_MTL_architecture_tuning_non_causal',
            tune_new_entries=True,
            allow_new_entries=True,
        )

    elif method == 'BayesianOptimization':
        tuner = BayesianOptimization(
            get_Lemaire_MTL_model,
            hyperparameters=hp,
            objective='val_loss',
            max_trials=max_trials,
            executions_per_trial=2,
            overwrite=False,
            directory=opDir,
            project_name='B3_MTL_architecture_tuning_non_causal',
            tune_new_entries=True,
            allow_new_entries=True,
        )

    else:
        raise ValueError(f'Unknown tuning method: {method}')

    return tuner
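For context, a minimal usage sketch; the output directory, method, trial budget and generator names below are assumptions rather than part of the original project:

# Hypothetical driver code; train_generator / val_generator stand in
# for the project's data feeds.
tuner = get_tuner('./tuning_output', 'BayesianOptimization', max_trials=20)
tuner.search(train_generator, epochs=10, validation_data=val_generator)
best_hp = tuner.get_best_hyperparameters(num_trials=1)[0]
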
Example 2
import functools

import tensorflow as tf
import keras_tuner as kt

# `core`, `mlp` and `_compile` are project-local helpers.
def build_v1(hp: kt.HyperParameters, base_feature_size: int = 0):
    spectral_size = hp.Choice("spectral_size",
                              values=[8, 16, 32, 64],
                              ordered=True)
    dropout_rate = hp.Float("dropout_rate", 0.0, 0.8, step=0.1)
    output_units = hp.Choice("embedding_size", [8, 16, 32, 64, 128],
                             ordered=True)
    hidden_units = hp.Choice("hidden_units",
                             values=[32, 64, 128, 256, 512],
                             ordered=True)
    hidden_layers = hp.Int("hidden_layers", min_value=1, max_value=3)
    spec = tf.TensorSpec(
        (
            None,
            spectral_size + base_feature_size,
        ),
        dtype=tf.float32,
    )
    model = core.sgae(
        spec,
        functools.partial(
            mlp,
            output_units=output_units,
            hidden_units=(hidden_units, ) * hidden_layers,
            dropout_rate=dropout_rate,
        ),
    )
    _compile(hp, model)
    return model
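A hypothetical way to hand `build_v1` to a tuner while pinning `base_feature_size`; the tuner class, objective and trial budget here are assumptions for illustration:

# functools.partial binds base_feature_size, so the tuner only passes `hp`.
tuner = kt.RandomSearch(
    functools.partial(build_v1, base_feature_size=4),
    objective='val_loss',
    max_trials=10,
)
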
Example 3

from keras_tuner import HyperParameters, RandomSearch, BayesianOptimization

# `PARAMS` and `get_Lemaire_model` are project-local.
def get_tuner(opDir, method, max_trials):
    hp = HyperParameters()
    hp.Int('kernel_size', min_value=3, max_value=19, step=2)  # 9 possible values
    hp.Int('Nd', min_value=3, max_value=8, step=1)  # 6 possible values
    hp.Int('nb_stacks', min_value=3, max_value=10, step=1)  # 8 possible values
    hp.Int('n_layers', min_value=1, max_value=4, step=1)  # 4 possible values
    hp.Choice('n_filters', [8, 16, 32])  # 3 possible values
    hp.Boolean('skip_some_connections')  # 2 possible values
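    # Search space: 9 * 6 * 8 * 4 * 3 * 2 = 10,368 combinations (TR_STEPS below is fixed).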
    hp.Fixed('TR_STEPS', PARAMS['TR_STEPS'])

    if method == 'RandomSearch':
        tuner = RandomSearch(
            get_Lemaire_model,
            hyperparameters=hp,
            objective='val_loss',
            max_trials=max_trials,
            executions_per_trial=2,
            overwrite=False,
            directory=opDir,
            project_name='B3_architecture_tuning_non_causal',
            tune_new_entries=True,
            allow_new_entries=True,
        )

    elif method == 'BayesianOptimization':
        tuner = BayesianOptimization(
            get_Lemaire_model,
            hyperparameters=hp,
            objective='val_loss',
            max_trials=max_trials,
            executions_per_trial=2,
            overwrite=False,
            directory=opDir,
            project_name='B3_architecture_tuning_non_causal',
            tune_new_entries=True,
            allow_new_entries=True,
        )

    else:
        raise ValueError(f'Unknown tuning method: {method}')

    return tuner
Example 4
import typing as tp

import keras_tuner as kt


def Int(
    hp: kt.HyperParameters,
    name: str,
    min_value: int,
    max_value: int,
    step: int = 1,
    sampling: tp.Optional[str] = None,
    default: tp.Optional[int] = None,
    parent_name: tp.Optional[str] = None,
    parent_values=None,
):
    """Thin wrapper that forwards every argument to `hp.Int`."""
    return hp.Int(
        name=name,
        min_value=min_value,
        max_value=max_value,
        step=step,
        sampling=sampling,
        default=default,
        parent_name=parent_name,
        parent_values=parent_values,
    )
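A small sketch of why forwarding `parent_name`/`parent_values` matters: it turns the integer into a conditional hyperparameter that is only active for certain parent values. The names below are illustrative:

hp = kt.HyperParameters()
hp.Choice('model_type', ['mlp', 'cnn'])
# 'filters' is only sampled in trials where model_type == 'cnn'.
Int(hp, 'filters', min_value=8, max_value=64, step=8,
    parent_name='model_type', parent_values=['cnn'])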
Example 5
tuner = RandomSearch(
    hypermodel,
    hyperparameters=hp,
    # `tune_new_entries=False` prevents unlisted parameters from being tuned
    tune_new_entries=False,
    objective="val_accuracy",
    max_trials=3,
    overwrite=True,
    directory="my_dir",
    project_name="helloworld",
)

tuner.search(x_train[:100],
             y_train[:100],
             epochs=1,
             validation_data=(x_val[:100], y_val[:100]))
"""
## About parameter default values

Whenever you register a hyperparameter inside a model-building function or the `build` method of a hypermodel,
you can specify a default value:

```python
hp.Int("units", min_value=32, max_value=512, step=32, default=128)
```

If you don't, hyperparameters always have a default default (for `Int`, it is equal to `min_value`).
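
As a quick sketch of that fallback (assuming `import keras_tuner` and a fresh `HyperParameters` object), registering a hyperparameter returns its current value, which is that default:

```python
hp = keras_tuner.HyperParameters()
units = hp.Int("units", min_value=32, max_value=512, step=32)
print(units)  # 32 -- no explicit default, so it falls back to min_value
```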

## Fixing values in a hypermodel

What if you want to do the reverse -- tune all available parameters in a hypermodel, **except** one (the learning rate)?
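
A minimal sketch of the usual answer, using `hp.Fixed` (the same call used in the tuner examples above) to pin one value while everything else stays searchable:

```python
hp = keras_tuner.HyperParameters()
hp.Fixed("learning_rate", value=1e-4)  # pinned; all other hyperparameters remain tunable
```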
Example 6
from keras_tuner import HyperParameters

DEFAULT_HP = HyperParameters()
DEFAULT_ARCH = {
    "layers": [
        ["LSTM", {
            "units": DEFAULT_HP.Int(name="units",
                                    min_value=32,
                                    max_value=128,
                                    step=32,
                                    default=64),
            "return_sequences": False,
            "kernel_initializer": "glorot_uniform",
            "activation": DEFAULT_HP.Choice(
                name="LSTM_1_activation",
                values=["relu", "tanh", "sigmoid", "linear"],
                default="relu"),
        }],
        ["Dropout", {
            "rate": DEFAULT_HP.Float(name="dropout",
                                     min_value=0.0,
                                     max_value=0.5,
                                     default=0.2,
                                     step=0.05)
        }