Example 1
import functools

import keras_tuner as kt
import tensorflow as tf

# core.sgae, mlp, and _compile are project-local helpers assumed to be in scope.


def build_v1(hp: kt.HyperParameters, base_feature_size: int = 0):
    spectral_size = hp.Choice("spectral_size",
                              values=[8, 16, 32, 64],
                              ordered=True)
    dropout_rate = hp.Float("dropout_rate", 0.0, 0.8, step=0.1)
    output_units = hp.Choice("embedding_size", [8, 16, 32, 64, 128],
                             ordered=True)
    hidden_units = hp.Choice("hidden_units",
                             values=[32, 64, 128, 256, 512],
                             ordered=True)
    hidden_layers = hp.Int("hidden_layers", min_value=1, max_value=3)
    # Model input width = tuned spectral size + any fixed base features.
    spec = tf.TensorSpec((None, spectral_size + base_feature_size), dtype=tf.float32)
    model = core.sgae(
        spec,
        functools.partial(
            mlp,
            output_units=output_units,
            hidden_units=(hidden_units,) * hidden_layers,
            dropout_rate=dropout_rate,
        ),
    )
    _compile(hp, model)
    return model
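
A builder like build_v1 is handed to a KerasTuner tuner, which constructs a fresh model per trial from the sampled hyperparameters. A minimal sketch using the imports above; x_train/y_train are hypothetical, and base_feature_size=4 is purely illustrative:

tuner = kt.RandomSearch(
    functools.partial(build_v1, base_feature_size=4),  # illustrative fixed argument
    objective='val_loss',
    max_trials=20,
    directory='tuning',
    project_name='sgae_v1',
)
tuner.search(x_train, y_train, validation_split=0.2, epochs=10)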

from keras_tuner import BayesianOptimization, HyperParameters, RandomSearch

# PARAMS, misc, and get_Lemaire_MTL_model are project-local and assumed in scope.


def get_tuner(opDir, method, max_trials):
    hp = HyperParameters()
    hp.Int('n_sp_hidden_lyrs', min_value=1, max_value=3, step=1)
    hp.Choice('sp_hidden_nodes', [16, 32, 64, 128])
    hp.Int('n_mu_hidden_lyrs', min_value=1, max_value=3, step=1)
    hp.Choice('mu_hidden_nodes', [16, 32, 64, 128])
    hp.Int('n_smr_hidden_lyrs', min_value=1, max_value=3, step=1)
    hp.Choice('smr_hidden_nodes', [16, 32, 64, 128])
    hp.Choice('classification_loss_sp', ['binary_crossentropy', 'hinge'])
    hp.Choice('classification_loss_mu', ['binary_crossentropy', 'hinge'])

    # Enumerating (alpha, beta, gamma) loss-weight triples was left commented out;
    # note that hp.Choice only accepts scalar values (int/float/str/bool), so a
    # list-valued choice like this cannot be registered directly.
    # loss_weights = []
    # alpha = np.arange(0.1, 0.8, 0.1)
    # beta = 1 - alpha
    # for i in range(len(beta)):
    #     gamma = np.arange(0.1, beta[i] - 0.1, 0.1)
    #     for j in range(len(gamma)):
    #         beta_i = beta[i] - gamma[j]
    #         loss_weights.append([alpha[i], beta_i, gamma[j]])
    # loss_weights = np.round(loss_weights, 1).tolist()
    # hp.Choice('loss_weights', loss_weights)
    hp.Fixed('TR_STEPS', PARAMS['TR_STEPS'])

    misc.print_model_summary(opDir + '/model_summary.txt',
                             get_Lemaire_MTL_model(hp))

    if method == 'RandomSearch':
        tuner = RandomSearch(
            get_Lemaire_MTL_model,
            hyperparameters=hp,
            objective='val_loss',
            max_trials=max_trials,
            executions_per_trial=2,
            overwrite=False,
            directory=opDir,
            project_name='B3_MTL_architecture_tuning_non_causal',
            tune_new_entries=True,
            allow_new_entries=True,
        )

    elif method == 'BayesianOptimization':
        tuner = BayesianOptimization(
            get_Lemaire_MTL_model,
            hyperparameters=hp,
            objective='val_loss',
            max_trials=max_trials,
            executions_per_trial=2,
            overwrite=False,
            directory=opDir,
            project_name='B3_MTL_architecture_tuning_non_causal',
            tune_new_entries=True,
            allow_new_entries=True,
        )

    else:
        raise ValueError(f'Unsupported tuning method: {method}')

    return tuner
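
For reference, the commented-out block above enumerates (alpha, beta, gamma) loss-weight triples on a 0.1 grid that sum to 1.0. A standalone sketch of that enumeration, with a hypothetical index-based workaround for hp.Choice's scalar-only restriction:

import numpy as np

# Triples (alpha, beta, gamma) on a 0.1 grid with alpha + beta + gamma == 1.0.
loss_weights = []
for alpha in np.arange(0.1, 0.8, 0.1):
    for gamma in np.arange(0.1, 1.0 - alpha - 0.1, 0.1):
        beta = 1.0 - alpha - gamma
        loss_weights.append([round(alpha, 1), round(beta, 1), round(gamma, 1)])

# Workaround sketch: tune an integer index rather than the list itself.
# idx = hp.Int('loss_weights_idx', 0, len(loss_weights) - 1)
# alpha, beta, gamma = loss_weights[idx]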
Example 3
import typing as tp

import keras_tuner as kt


def Choice(
    hp: kt.HyperParameters,
    values: tp.Sequence,
    name: str,
    ordered: tp.Optional[bool] = None,
    default: tp.Optional[tp.Any] = None,
    parent_name: tp.Optional[str] = None,
    parent_values: tp.Optional[tp.Any] = None,
):
    return hp.Choice(
        values=values,
        name=name,
        ordered=ordered,
        default=default,
        parent_name=parent_name,
        parent_values=parent_values,
    )
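
The wrapper simply forwards to hp.Choice with values promoted to the leading positional slot; the parent_name/parent_values passthrough is what enables conditional hyperparameters. A usage sketch with illustrative hyperparameter names:

hp = kt.HyperParameters()
model_type = hp.Choice('model_type', ['cnn', 'mlp'])

# 'n_filters' is only considered when the parent 'model_type' resolves to 'cnn'.
n_filters = Choice(
    hp,
    values=[16, 32, 64],
    name='n_filters',
    parent_name='model_type',
    parent_values=['cnn'],
)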
from keras_tuner import BayesianOptimization, HyperParameters, RandomSearch

# PARAMS and get_Lemaire_model are project-local and assumed in scope.


def get_tuner(opDir, method, max_trials):
    hp = HyperParameters()
    hp.Int('kernel_size', min_value=3, max_value=19, step=2)  # 9 candidate values
    hp.Int('Nd', min_value=3, max_value=8, step=1)  # 6 candidate values
    hp.Int('nb_stacks', min_value=3, max_value=10, step=1)  # 8 candidate values
    hp.Int('n_layers', min_value=1, max_value=4, step=1)  # 4 candidate values
    hp.Choice('n_filters', [8, 16, 32])  # 3 candidate values
    hp.Boolean('skip_some_connections')  # 2 candidate values
    hp.Fixed('TR_STEPS', PARAMS['TR_STEPS'])

    if method == 'RandomSearch':
        tuner = RandomSearch(
            get_Lemaire_model,
            hyperparameters=hp,
            objective='val_loss',
            max_trials=max_trials,
            executions_per_trial=2,
            overwrite=False,
            directory=opDir,
            project_name='B3_architecture_tuning_non_causal',
            tune_new_entries=True,
            allow_new_entries=True,
        )

    elif method == 'BayesianOptimization':
        tuner = BayesianOptimization(
            get_Lemaire_model,
            hyperparameters=hp,
            objective='val_loss',
            max_trials=max_trials,
            executions_per_trial=2,
            overwrite=False,
            directory=opDir,
            project_name='B3_architecture_tuning_non_causal',
            tune_new_entries=True,
            allow_new_entries=True,
        )

    else:
        raise ValueError(f'Unsupported tuning method: {method}')

    return tuner
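
The trailing counts on each hyperparameter line above appear to be the number of candidate values; their product is the size of the discrete search space, which puts max_trials in perspective:

from math import prod

# 9 * 6 * 8 * 4 * 3 * 2 candidate values, per the inline comments above.
print(prod([9, 6, 8, 4, 3, 2]))  # 10368 possible configurations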
Example 5
"""
If you have an existing hypermodel and you want to search over only a few parameters
(such as the learning rate), you can do so by passing a `hyperparameters` argument
to the tuner constructor, along with `tune_new_entries=False` to specify that parameters
you didn't list in `hyperparameters` should not be tuned. For those parameters, the
default value is used.
"""

from keras_tuner import HyperParameters, RandomSearch
from keras_tuner.applications import HyperXception

hypermodel = HyperXception(input_shape=(28, 28, 1), classes=10)

hp = HyperParameters()

# This will override the `learning_rate` parameter with your
# own selection of choices
hp.Choice("learning_rate", values=[1e-2, 1e-3, 1e-4])

tuner = RandomSearch(
    hypermodel,
    hyperparameters=hp,
    # `tune_new_entries=False` prevents unlisted parameters from being tuned
    tune_new_entries=False,
    objective="val_accuracy",
    max_trials=3,
    overwrite=True,
    directory="my_dir",
    project_name="helloworld",
)

# x_train, y_train, x_val, and y_val are assumed to be loaded beforehand
# (e.g., MNIST, given the (28, 28, 1) input shape above).
tuner.search(x_train[:100],
             y_train[:100],
             epochs=1,
             validation_data=(x_val[:100], y_val[:100]))
Example 6
import keras_tuner as kt

# DEFAULT_HP is assumed to be a module-level container; outside of a tuner run,
# each hp.* call below simply registers the hyperparameter and returns its default.
DEFAULT_HP = kt.HyperParameters()

DEFAULT_ARCH = {
    "layers": [
        [
            "LSTM",
            {
                "units": DEFAULT_HP.Int(
                    name="units", min_value=32, max_value=128, step=32, default=64
                ),
                "return_sequences": False,
                "kernel_initializer": "glorot_uniform",
                "activation": DEFAULT_HP.Choice(
                    name="LSTM_1_activation",
                    values=["relu", "tanh", "sigmoid", "linear"],
                    default="relu",
                ),
            },
        ],
        [
            "Dropout",
            {
                "rate": DEFAULT_HP.Float(
                    name="dropout", min_value=0.0, max_value=0.5, step=0.05, default=0.2
                )
            },
        ],
        ["Dense", {"activation": "linear"}],
    ]
}
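
A spec in this shape is presumably consumed by instantiating each [layer_name, kwargs] pair in order. A minimal sketch of such a builder, assuming plain tf.keras layers; the input shape is illustrative, and a caller-supplied units is patched in for the final Dense layer since the spec leaves it out:

import tensorflow as tf

def build_from_arch(arch, input_shape=(30, 1), output_units=1):
    """Instantiate each [layer_name, kwargs] pair from the spec in order."""
    model = tf.keras.Sequential([tf.keras.Input(shape=input_shape)])
    for layer_name, kwargs in arch["layers"]:
        layer_cls = getattr(tf.keras.layers, layer_name)
        if layer_name == "Dense" and "units" not in kwargs:
            kwargs = {**kwargs, "units": output_units}
        model.add(layer_cls(**kwargs))
    return model

model = build_from_arch(DEFAULT_ARCH)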