def train(self, hparams=lib.get_default_hparams()):
    """Feeds the training data for training.

    Args:
      hparams: A namedtuple of hyperparameters. This function expects
      .train_epochs: a Python integer with the number of passes over the
        training dataset;
      .learning_rate: a Python float forwarded to the optimizer;
      .momentum: a Python float forwarded to the optimizer;
      .batch_size: a Python integer, number of samples per training step.

    Returns:
      The tf.keras.callbacks.History object returned by tf.keras.Model.fit*().
    """

    train_data_and_size = (self._gen_train_dataset(self.train_data,
                                                   hparams.batch_size),
                           self.train_data.size)
    validation_data_and_size = (self._gen_valid_dataset(self.valid_data,
                                                        hparams.batch_size),
                                self.valid_data.size)

    # Trains the model.
    return lib.train_model(self.model, hparams, train_data_and_size,
                           validation_data_and_size)
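
For context, a minimal usage sketch of this method, assuming lib.get_default_hparams() really returns a namedtuple with the documented fields and that `classifier` is a hypothetical, already-constructed instance of this class:

# Hypothetical usage sketch; `classifier` and the chosen hyperparameter values
# are illustrative assumptions, not part of the snippet above.
hparams = lib.get_default_hparams()._replace(
    train_epochs=5,       # passes over the training dataset
    learning_rate=0.005,  # forwarded to the optimizer
    momentum=0.9,         # forwarded to the optimizer
    batch_size=32)        # samples per training step

history = classifier.train(hparams=hparams)

# The return value is the tf.keras.callbacks.History from tf.keras.Model.fit(),
# so per-epoch metrics live in its .history dict.
print(history.history.get('loss'))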
Example no. 2
    def train(self, train_data, validation_data=None, hparams=None):
        """Feeds the training data for training.

    Args:
      train_data: Training data.
      validation_data: Validation data. If None, skips validation process.
      hparams: A namedtuple of hyperparameters. This function expects
      .train_epochs: a Python integer with the number of passes over the
        training dataset;
      .learning_rate: a Python float forwarded to the optimizer;
      .momentum: a Python float forwarded to the optimizer;
      .batch_size: a Python integer, number of samples per training step.

    Returns:
      The tf.keras.callbacks.History object returned by tf.keras.Model.fit*().
    """
        hparams = self._get_hparams_or_default(hparams)

        train_ds = self._gen_dataset(train_data,
                                     hparams.batch_size,
                                     is_training=True)
        train_data_and_size = (train_ds, train_data.size)

        validation_ds = None
        validation_size = 0
        if validation_data is not None:
            validation_ds = self._gen_dataset(validation_data,
                                              hparams.batch_size,
                                              is_training=False)
            validation_size = validation_data.size
        validation_data_and_size = (validation_ds, validation_size)
        # Trains the model.
        return lib.train_model(self.model, hparams, train_data_and_size,
                               validation_data_and_size)
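
Neither snippet shows lib.train_model itself. Below is a minimal sketch of what such a helper could do with the (dataset, size) pairs assembled above, assuming an SGD optimizer built from the hyperparameters and a classification loss; the name train_model_sketch and the loss choice are assumptions, not the library's actual code.

import tensorflow as tf


def train_model_sketch(model, hparams, train_data_and_size,
                       validation_data_and_size):
    """Hypothetical stand-in for lib.train_model; the real helper may differ."""
    train_ds, train_size = train_data_and_size
    validation_ds, validation_size = validation_data_and_size

    # Build the optimizer from the documented hyperparameters.
    model.compile(
        optimizer=tf.keras.optimizers.SGD(
            learning_rate=hparams.learning_rate, momentum=hparams.momentum),
        loss='categorical_crossentropy',  # assumption: a classification model
        metrics=['accuracy'])

    # Derive step counts from the dataset sizes so repeated/infinite
    # tf.data pipelines still yield well-defined epochs.
    steps_per_epoch = train_size // hparams.batch_size
    validation_steps = (validation_size // hparams.batch_size
                        if validation_ds is not None else None)

    return model.fit(
        train_ds,
        epochs=hparams.train_epochs,
        steps_per_epoch=steps_per_epoch,
        validation_data=validation_ds,
        validation_steps=validation_steps)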