Example #1
def _build_keras_model(hparams: keras_tuner.HyperParameters) -> tf.keras.Model:
    """Creates a DNN Keras model for classifying penguin data.

  Args:
    hparams: Holds HyperParameters for tuning.

  Returns:
    A Keras Model.
  """
    # The model below is built with Functional API, please refer to
    # https://www.tensorflow.org/guide/keras/overview for all API options.
    inputs = [
        keras.layers.Input(shape=(1,), name=_transformed_name(f))
        for f in _FEATURE_KEYS
    ]
    d = keras.layers.concatenate(inputs)
    for _ in range(int(hparams.get('num_layers'))):
        d = keras.layers.Dense(8, activation='relu')(d)
    outputs = keras.layers.Dense(3, activation='softmax')(d)

    model = keras.Model(inputs=inputs, outputs=outputs)
    model.compile(optimizer=keras.optimizers.Adam(
        hparams.get('learning_rate')),
                  loss='sparse_categorical_crossentropy',
                  metrics=[keras.metrics.SparseCategoricalAccuracy()])

    model.summary(print_fn=logging.info)
    return model
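A builder like this is what a KerasTuner tuner calls once per trial, each time with fresh hyperparameter values. A minimal sketch of the wiring; the ranges for `num_layers` and `learning_rate` are assumptions here, since the original defines its search space elsewhere:

import keras_tuner

hp = keras_tuner.HyperParameters()
hp.Int('num_layers', min_value=1, max_value=4)    # assumed range
hp.Choice('learning_rate', [1e-2, 1e-3, 1e-4])    # assumed choices

tuner = keras_tuner.RandomSearch(
    _build_keras_model,    # called once per trial
    hyperparameters=hp,
    objective='val_sparse_categorical_accuracy',
    max_trials=10,
)
# tuner.search(train_dataset, validation_data=eval_dataset)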
Example #2
def build_v1(hp: kt.HyperParameters, base_feature_size: int = 0):
    spectral_size = hp.Choice("spectral_size",
                              values=[8, 16, 32, 64],
                              ordered=True)
    dropout_rate = hp.Float("dropout_rate", 0.0, 0.8, step=0.1)
    output_units = hp.Choice("embedding_size", [8, 16, 32, 64, 128],
                             ordered=True)
    hidden_units = hp.Choice("hidden_units",
                             values=[32, 64, 128, 256, 512],
                             ordered=True)
    hidden_layers = hp.Int("hidden_layers", min_value=1, max_value=3)
    spec = tf.TensorSpec(
        (
            None,
            spectral_size + base_feature_size,
        ),
        dtype=tf.float32,
    )
    model = core.sgae(
        spec,
        functools.partial(
            mlp,
            output_units=output_units,
            hidden_units=(hidden_units,) * hidden_layers,
            dropout_rate=dropout_rate,
        ),
    )
    _compile(hp, model)
    return model
Example #3
def get_tuner(opDir, method, max_trials):
    hp = HyperParameters()
    hp.Int('n_sp_hidden_lyrs', min_value=1, max_value=3, step=1)
    hp.Choice('sp_hidden_nodes', [16, 32, 64, 128])
    hp.Int('n_mu_hidden_lyrs', min_value=1, max_value=3, step=1)
    hp.Choice('mu_hidden_nodes', [16, 32, 64, 128])
    hp.Int('n_smr_hidden_lyrs', min_value=1, max_value=3, step=1)
    hp.Choice('smr_hidden_nodes', [16, 32, 64, 128])
    hp.Choice('classification_loss_sp', ['binary_crossentropy', 'hinge'])
    hp.Choice('classification_loss_mu', ['binary_crossentropy', 'hinge'])

    # loss_weights = []
    # alpha = np.arange(0.1,0.8,0.1)
    # beta = 1-alpha
    # for i in range(len(beta)):
    #     gamma = np.arange(0.1, beta[i]-0.1, 0.1)
    #     for j in range(len(gamma)):
    #         beta_i = beta[i] - gamma[j]
    #         loss_weights.append([alpha[i], beta_i, gamma[j]])
    # loss_weights = np.round(loss_weights,1).tolist()
    # hp.Choice('loss_weights', loss_weights)
    hp.Fixed('TR_STEPS', PARAMS['TR_STEPS'])

    misc.print_model_summary(opDir + '/model_summary.txt',
                             get_Lemaire_MTL_model(hp))

    if method == 'RandomSearch':
        tuner = RandomSearch(
            get_Lemaire_MTL_model,
            hyperparameters=hp,
            objective='val_loss',
            max_trials=max_trials,
            executions_per_trial=2,
            overwrite=False,
            directory=opDir,
            project_name='B3_MTL_architecture_tuning_non_causal',
            tune_new_entries=True,
            allow_new_entries=True,
        )

    elif method == 'BayesianOptimization':
        tuner = BayesianOptimization(
            get_Lemaire_MTL_model,
            hyperparameters=hp,
            objective='val_loss',
            max_trials=max_trials,
            executions_per_trial=2,
            overwrite=False,
            directory=opDir,
            project_name='B3_MTL_architecture_tuning_non_causal',
            tune_new_entries=True,
            allow_new_entries=True,
        )
    else:
        raise ValueError(f'Unsupported tuning method: {method}')

    return tuner
Example #4
def _make_keras_model(hparams: kt.HyperParameters) -> tf.keras.Model:
  """Creates a TFDF Keras model for classifying penguin data.

  Args:
    hparams: Holds HyperParameters for tuning.

  Returns:
    A Keras Model.
  """
  return tfdf.keras.GradientBoostedTreesModel(
      max_depth=hparams.get('max_depth'),
      shrinkage=hparams.get('shrinkage'),
      use_hessian_gain=hparams.get('use_hessian_gain'))
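The corresponding search space lives on the `HyperParameters` object passed in; a minimal sketch with assumed ranges (the original defines them elsewhere):

hp = kt.HyperParameters()
hp.Int('max_depth', min_value=3, max_value=8)         # assumed range
hp.Float('shrinkage', min_value=0.05, max_value=0.3)  # assumed range
hp.Boolean('use_hessian_gain')

model = _make_keras_model(hp)  # a GBT model built with the current trial's values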
Example #5
def Fixed(
    hp: kt.HyperParameters,
    name: str,
    value,
    parent_name: tp.Optional[str] = None,
    parent_values: tp.Optional[tp.Any] = None,
):
    # hp.Fixed requires a name; without it the call raises a TypeError.
    return hp.Fixed(name=name,
                    value=value,
                    parent_name=parent_name,
                    parent_values=parent_values)
Example #6
def _verify_output(self):
    # Test best hparams.
    best_hparams_path = os.path.join(self._best_hparams.uri,
                                     'best_hyperparameters.txt')
    self.assertTrue(fileio.exists(best_hparams_path))
    best_hparams_config = json.loads(
        file_io.read_file_to_string(best_hparams_path))
    best_hparams = HyperParameters.from_config(best_hparams_config)
    self.assertIn(best_hparams.get('learning_rate'), (1e-1, 1e-3))
    self.assertBetween(best_hparams.get('num_layers'), 1, 5)
Example #7
def test_VQVAE_hypermodel_build(
    latent_dim,
    n_components,
):
    deepof.hypermodels.VQVAE(
        latent_dim=latent_dim,
        input_shape=(
            100,
            15,
            10,
        ),
        n_components=n_components,
    ).build(hp=HyperParameters())
Example #8
def Boolean(
    hp: kt.HyperParameters,
    name: str,
    default: bool = False,
    parent_name: tp.Optional[str] = None,
    parent_values: tp.Optional[tp.Any] = None,
) -> bool:
    return hp.Boolean(
        name=name,
        default=default,
        parent_name=parent_name,
        parent_values=parent_values,
    )
Example #9
def get_tuner(opDir, method, max_trials):
    hp = HyperParameters()
    hp.Int('kernel_size', min_value=3, max_value=19, step=2)  # 9
    hp.Int('Nd', min_value=3, max_value=8, step=1)  # 6
    hp.Int('nb_stacks', min_value=3, max_value=10, step=1)  # 8
    hp.Int('n_layers', min_value=1, max_value=4, step=1)  # 4
    hp.Choice('n_filters', [8, 16, 32])  # 3
    hp.Boolean('skip_some_connections')  # 2
    hp.Fixed('TR_STEPS', PARAMS['TR_STEPS'])

    if method == 'RandomSearch':
        tuner = RandomSearch(
            get_Lemaire_model,
            hyperparameters=hp,
            objective='val_loss',
            max_trials=max_trials,
            executions_per_trial=2,
            overwrite=False,
            directory=opDir,
            project_name='B3_architecture_tuning_non_causal',
            tune_new_entries=True,
            allow_new_entries=True,
        )

    elif method == 'BayesianOptimization':
        tuner = BayesianOptimization(
            get_Lemaire_model,
            hyperparameters=hp,
            objective='val_loss',
            max_trials=max_trials,
            executions_per_trial=2,
            overwrite=False,
            directory=opDir,
            project_name='B3_architecture_tuning_non_causal',
            tune_new_entries=True,
            allow_new_entries=True,
        )
    else:
        raise ValueError(f'Unsupported tuning method: {method}')

    return tuner
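Either way, the returned tuner is driven in the usual KerasTuner fashion; a hedged sketch in which the path and dataset names are placeholders:

tuner = get_tuner('/tmp/B3_tuning', 'RandomSearch', max_trials=20)
tuner.search(train_data, validation_data=val_data)
best_hp = tuner.get_best_hyperparameters(num_trials=1)[0]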
Example #10
def Choice(
    hp: kt.HyperParameters,
    values: tp.Sequence,
    name: str,
    ordered: tp.Optional[bool] = None,
    default: tp.Optional[tp.Any] = None,
    parent_name: tp.Optional[str] = None,
    parent_values: tp.Optional[tp.Any] = None,
):
    return hp.Choice(
        values=values,
        name=name,
        ordered=ordered,
        default=default,
        parent_name=parent_name,
        parent_values=parent_values,
    )
Example #11
def Int(
    hp: kt.HyperParameters,
    name: str,
    min_value: int,
    max_value: int,
    step: int = 1,
    sampling: tp.Optional[str] = None,
    default: tp.Optional[int] = None,
    parent_name: tp.Optional[str] = None,
    parent_values=None,
):
    return hp.Int(
        name=name,
        min_value=min_value,
        max_value=max_value,
        step=step,
        sampling=sampling,
        default=default,
        parent_name=parent_name,
        parent_values=parent_values,
    )
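These thin wrappers (`Fixed`, `Boolean`, `Choice`, `Int`) only flip the call style so the `HyperParameters` object is passed in explicitly, which makes them convenient to pre-bind with `functools.partial`. A minimal usage sketch under that assumption:

import functools

import keras_tuner as kt

# Pre-bind a search-space entry once, then apply it to any HyperParameters object.
units = functools.partial(Int, name='units', min_value=32, max_value=256, step=32)

hp = kt.HyperParameters()
n_units = units(hp)             # registers 'units' and returns its current value
use_bn = Boolean(hp, 'use_bn')  # equivalent to hp.Boolean('use_bn')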
Example #12
"""
## You can easily restrict the search space to just a few parameters

If you have an existing hypermodel and want to search over only a few parameters
(such as the learning rate), pass a `hyperparameters` argument to the tuner
constructor together with `tune_new_entries=False`, so that any parameter not
listed in `hyperparameters` is left untuned and keeps its default value.
"""

from keras_tuner import HyperParameters, RandomSearch
from keras_tuner.applications import HyperXception

hypermodel = HyperXception(input_shape=(28, 28, 1), classes=10)

hp = HyperParameters()

# This will override the `learning_rate` parameter with your
# own selection of choices
hp.Choice("learning_rate", values=[1e-2, 1e-3, 1e-4])

tuner = RandomSearch(
    hypermodel,
    hyperparameters=hp,
    # `tune_new_entries=False` prevents unlisted parameters from being tuned
    tune_new_entries=False,
    objective="val_accuracy",
    max_trials=3,
    overwrite=True,
    directory="my_dir",
    project_name="helloworld",
Example #13
from keras_tuner import HyperParameters

DEFAULT_HP = HyperParameters()
DEFAULT_ARCH = {
    "layers": [
        [
            "LSTM",
            {
                "units": DEFAULT_HP.Int(name='units',
                                        min_value=32,
                                        max_value=128,
                                        step=32,
                                        default=64),
                "return_sequences": False,
                "kernel_initializer": "glorot_uniform",
                "activation": DEFAULT_HP.Choice(
                    name='LSTM_1_activation',
                    values=['relu', 'tanh', 'sigmoid', 'linear'],
                    default='relu'),
            },
        ],
        [
            "Dropout",
            {
                "rate": DEFAULT_HP.Float(name='dropout',
                                         min_value=0.0,
                                         max_value=0.5,
                                         default=0.2,
                                         step=0.05),
            },
        ],
    ],
}
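Because `DEFAULT_HP` starts out empty, each `Int`/`Choice`/`Float` call above both registers the hyperparameter and returns its default, so `DEFAULT_ARCH` ends up holding concrete values. A quick way to confirm:

print(DEFAULT_HP.values)
# {'units': 64, 'LSTM_1_activation': 'relu', 'dropout': 0.2}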