Ejemplo n.º 1
0
def Fixed(
    hp: kt.HyperParameters,
    value,
    parent_name: tp.Optional[str] = None,
    parent_values: tp.Optional[tp.Any] = None,
):
    """Register a fixed (non-tunable) hyperparameter on *hp*.

    Thin pass-through to ``hp.Fixed``: the value is pinned for every trial,
    optionally conditioned on a parent hyperparameter taking one of
    ``parent_values``.
    """
    fixed_kwargs = {
        "value": value,
        "parent_name": parent_name,
        "parent_values": parent_values,
    }
    return hp.Fixed(**fixed_kwargs)
def get_tuner(opDir, method, max_trials):
    """Create a hyperparameter tuner for the multi-task (MTL) Lemaire model.

    Also writes a summary of the model built from this search space to
    ``<opDir>/model_summary.txt``.

    Parameters:
        opDir: output directory for tuner state and the model summary file.
        method: search strategy, 'RandomSearch' or 'BayesianOptimization'.
        max_trials: maximum number of hyperparameter combinations to try.

    Returns:
        A configured keras-tuner tuner wrapping ``get_Lemaire_MTL_model``.

    Raises:
        ValueError: if ``method`` is not a supported search strategy.
    """
    hp = HyperParameters()
    # Depth and width of the three task-specific sub-networks (sp/mu/smr).
    hp.Int('n_sp_hidden_lyrs', min_value=1, max_value=3, step=1)
    hp.Choice('sp_hidden_nodes', [16, 32, 64, 128])
    hp.Int('n_mu_hidden_lyrs', min_value=1, max_value=3, step=1)
    hp.Choice('mu_hidden_nodes', [16, 32, 64, 128])
    hp.Int('n_smr_hidden_lyrs', min_value=1, max_value=3, step=1)
    hp.Choice('smr_hidden_nodes', [16, 32, 64, 128])
    # Per-task classification losses.
    hp.Choice('classification_loss_sp', ['binary_crossentropy', 'hinge'])
    hp.Choice('classification_loss_mu', ['binary_crossentropy', 'hinge'])
    # NOTE(review): a grid over the three MTL loss weights ('loss_weights')
    # was sketched here but left disabled; re-add an hp.Choice entry if loss
    # weighting should be tuned.
    hp.Fixed('TR_STEPS', PARAMS['TR_STEPS'])

    misc.print_model_summary(opDir + '/model_summary.txt',
                             get_Lemaire_MTL_model(hp))

    # Both strategies share identical configuration; only the tuner class
    # differs. Previously an unrecognized `method` left `tuner` unbound and
    # the final `return tuner` raised UnboundLocalError — fail fast instead.
    tuner_classes = {
        'RandomSearch': RandomSearch,
        'BayesianOptimization': BayesianOptimization,
    }
    if method not in tuner_classes:
        raise ValueError(
            f"Unknown tuning method {method!r}; "
            f"expected one of {sorted(tuner_classes)}"
        )
    return tuner_classes[method](
        get_Lemaire_MTL_model,
        hyperparameters=hp,
        objective='val_loss',
        max_trials=max_trials,
        executions_per_trial=2,   # average out training noise per trial
        overwrite=False,          # resume any previous tuning run in opDir
        directory=opDir,
        project_name='B3_MTL_architecture_tuning_non_causal',
        tune_new_entries=True,    # tune hyperparameters discovered at build time
        allow_new_entries=True,
    )
def get_tuner(opDir, method, max_trials):
    """Create a hyperparameter tuner for the (single-task) Lemaire TCN model.

    Parameters:
        opDir: output directory for tuner state.
        method: search strategy, 'RandomSearch' or 'BayesianOptimization'.
        max_trials: maximum number of hyperparameter combinations to try.

    Returns:
        A configured keras-tuner tuner wrapping ``get_Lemaire_model``.

    Raises:
        ValueError: if ``method`` is not a supported search strategy.
    """
    hp = HyperParameters()
    # TCN architecture search space; trailing comments give the number of
    # candidate values per hyperparameter.
    hp.Int('kernel_size', min_value=3, max_value=19, step=2)  # 9
    hp.Int('Nd', min_value=3, max_value=8, step=1)  # 6
    hp.Int('nb_stacks', min_value=3, max_value=10, step=1)  # 8
    hp.Int('n_layers', min_value=1, max_value=4, step=1)  # 4
    hp.Choice('n_filters', [8, 16, 32])  # 3
    hp.Boolean('skip_some_connections')  # 2
    hp.Fixed('TR_STEPS', PARAMS['TR_STEPS'])

    # Both strategies share identical configuration; only the tuner class
    # differs. Previously an unrecognized `method` left `tuner` unbound and
    # the final `return tuner` raised UnboundLocalError — fail fast instead.
    tuner_classes = {
        'RandomSearch': RandomSearch,
        'BayesianOptimization': BayesianOptimization,
    }
    if method not in tuner_classes:
        raise ValueError(
            f"Unknown tuning method {method!r}; "
            f"expected one of {sorted(tuner_classes)}"
        )
    return tuner_classes[method](
        get_Lemaire_model,
        hyperparameters=hp,
        objective='val_loss',
        max_trials=max_trials,
        executions_per_trial=2,   # average out training noise per trial
        overwrite=False,          # resume any previous tuning run in opDir
        directory=opDir,
        project_name='B3_architecture_tuning_non_causal',
        tune_new_entries=True,    # tune hyperparameters discovered at build time
        allow_new_entries=True,
    )
Ejemplo n.º 4
0
hp.Int("units", min_value=32, max_value=512, step=32, default=128)
```

If you don't, each hyperparameter falls back to a built-in default value (for `Int`, this is `min_value`).

## Fixing values in a hypermodel

What if you want to do the reverse -- tune all available parameters in a hypermodel, **except** one (the learning rate)?

Pass a `hyperparameters` argument with a `Fixed` entry (or any number of `Fixed` entries), and specify `tune_new_entries=True`.
"""

# Build the ready-made Xception hypermodel for 28x28 grayscale inputs
# (e.g. MNIST-shaped data) with 10 output classes.
hypermodel = HyperXception(input_shape=(28, 28, 1), classes=10)

# Pin the learning rate to a fixed value; every other hyperparameter the
# hypermodel declares remains tunable because tune_new_entries=True below.
hp = HyperParameters()
hp.Fixed("learning_rate", value=1e-4)

tuner = RandomSearch(
    hypermodel,
    hyperparameters=hp,
    tune_new_entries=True,  # tune all hyperparameters not fixed above
    objective="val_accuracy",
    max_trials=3,
    overwrite=True,  # discard any previous results in this project dir
    directory="my_dir",
    project_name="helloworld",
)

tuner.search(x_train[:100],
             y_train[:100],
             epochs=1,