Example #1
    def __call__(self, features, mode, params):
        """Builds the model and returns an EstimatorSpec or TPUEstimatorSpec."""
        hparams = copy.deepcopy(self._base_hparams)
        if "batch_size" in params:
            hparams.batch_size = params["batch_size"]

        model = self._model_class(features, hparams, mode)
        model.build()

        # Possibly create train_op.
        use_tpu = self._use_tpu
        train_op = None
        if mode == tf.estimator.ModeKeys.TRAIN:
            learning_rate = training.create_learning_rate(
                hparams, model.global_step)
            optimizer = training.create_optimizer(hparams, learning_rate,
                                                  use_tpu)
            train_op = training.create_train_op(model, optimizer)

        if use_tpu:
            estimator = tf.contrib.tpu.TPUEstimatorSpec(mode=mode,
                                                        loss=model.total_loss,
                                                        train_op=train_op)
        else:
            estimator = tf.estimator.EstimatorSpec(mode=mode,
                                                   loss=model.total_loss,
                                                   train_op=train_op)

        return estimator
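
The snippet above is only the model-building callable; the page does not show how it is attached to an Estimator. A minimal wiring sketch, assuming a hypothetical ModelFn class whose constructor stores the _model_class, _base_hparams, and _use_tpu attributes used above (MyModel, hparams, and train_input_fn are likewise assumptions):

import tensorflow as tf

# Hypothetical wiring; ModelFn, MyModel, hparams, and train_input_fn are not
# part of the example above.
model_fn = ModelFn(model_class=MyModel, base_hparams=hparams, use_tpu=False)

estimator = tf.estimator.Estimator(
    model_fn=model_fn,  # dispatches to the __call__ shown above
    model_dir="/tmp/model",
    params={"batch_size": hparams.batch_size})

estimator.train(input_fn=train_input_fn, max_steps=10000)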
Example #2
  def __call__(self, features, mode, params):
    """Builds the model and returns an EstimatorSpec or TPUEstimatorSpec."""
    hparams = copy.deepcopy(self._base_hparams)
    if "batch_size" in params:
      hparams.batch_size = params["batch_size"]

    model = self._model_class(features, hparams, mode)
    model.build()

    # Possibly create train_op.
    use_tpu = self._use_tpu
    train_op = None
    if mode == tf.estimator.ModeKeys.TRAIN:
      learning_rate = training.create_learning_rate(hparams, model.global_step)
      optimizer = training.create_optimizer(hparams, learning_rate, use_tpu)
      train_op = training.create_train_op(model, optimizer)

    if use_tpu:
      estimator = tf.contrib.tpu.TPUEstimatorSpec(
          mode=mode, loss=model.total_loss, train_op=train_op)
    else:
      estimator = tf.estimator.EstimatorSpec(
          mode=mode, loss=model.total_loss, train_op=train_op)

    return estimator
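
Example #2 is the same builder with 2-space indentation. The training.create_train_op helper it calls is project-specific and not shown on this page; a minimal sketch of what such a TF 1.x helper typically does, assuming the model exposes total_loss and global_step as above:

import tensorflow as tf

def create_train_op(model, optimizer):
  # Sketch only (assumed, not the project's actual helper): compute gradients
  # of the total loss and apply them while incrementing the global step.
  grads_and_vars = optimizer.compute_gradients(model.total_loss)
  return optimizer.apply_gradients(grads_and_vars,
                                   global_step=model.global_step)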
Example #3
  def __call__(self, features, mode):
    """Builds the model and returns an EstimatorSpec."""
    model = self.model_class(features, self.hparams, mode)
    model.build()

    # Possibly create train_op.
    train_op = None
    if mode == tf.estimator.ModeKeys.TRAIN:
      learning_rate = create_learning_rate(self.hparams, model.global_step)
      optimizer = training.create_optimizer(self.hparams, learning_rate)
      train_op = training.create_train_op(model, optimizer)

    # Possibly create evaluation metrics.
    eval_metrics = None
    if mode == tf.estimator.ModeKeys.EVAL:
      eval_metrics = {
        "num_examples": sum_metric(tf.ones_like(model.label, dtype=tf.int32)),
        "num_eval_batches": sum_metric(1),
        "rmse": tf.metrics.root_mean_squared_error(
            model.label, model.predicted_rv),
        "root_mean_label": tf.metrics.root_mean_squared_error(
            model.label, tf.zeros_like(model.label)),
        "root_mean_pred": tf.metrics.root_mean_squared_error(
            model.predicted_rv, tf.zeros_like(model.predicted_rv)),
      }

    return tf.estimator.EstimatorSpec(
        mode=mode,
        predictions=model.predicted_rv,
        loss=model.total_loss,
        train_op=train_op,
        eval_metric_ops=eval_metrics)
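
sum_metric is used above but not defined in the snippet. A minimal sketch of a streaming-sum metric that fits the (value_op, update_op) contract expected by eval_metric_ops (the name and exact behavior are assumptions):

import tensorflow as tf

def sum_metric(values, name=None):
  # Streaming sum across evaluation batches, mirroring the
  # (value_op, update_op) pair returned by the tf.metrics.* functions.
  with tf.variable_scope(name, default_name="sum_metric"):
    values = tf.cast(values, tf.float32)
    total = tf.get_variable(
        "total",
        shape=[],
        dtype=tf.float32,
        initializer=tf.zeros_initializer(),
        trainable=False,
        collections=[tf.GraphKeys.LOCAL_VARIABLES,
                     tf.GraphKeys.METRIC_VARIABLES])
    update_op = tf.assign_add(total, tf.reduce_sum(values))
    return total, update_op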
Example #4
    def __call__(self, features, labels, mode, params):
        """Builds the model and returns an EstimatorSpec or TPUEstimatorSpec."""
        hparams = copy.deepcopy(self._base_hparams)
        if "batch_size" in params:
            hparams.batch_size = params["batch_size"]

        # Allow labels to be passed in the features dictionary.
        if "labels" in features:
            if labels is not None and labels is not features["labels"]:
                raise ValueError(
                    "Conflicting labels: features['labels'] = {}, labels = {}".
                    format(features["labels"], labels))
            labels = features.pop("labels")

        model = self._model_class(features, labels, hparams, mode)
        model.build()

        # Possibly create train_op.
        use_tpu = self._use_tpu
        train_op = None
        if mode == tf.estimator.ModeKeys.TRAIN:
            learning_rate = training.create_learning_rate(
                hparams, model.global_step)
            optimizer = training.create_optimizer(hparams, learning_rate,
                                                  use_tpu)
            train_op = training.create_train_op(model, optimizer)

        # Possibly create evaluation metrics.
        eval_metrics = None
        if mode == tf.estimator.ModeKeys.EVAL:
            eval_metrics = (metrics.create_metric_fn(model)
                            if use_tpu else metrics.create_metrics(model))

        if use_tpu:
            estimator = tf.contrib.tpu.TPUEstimatorSpec(
                mode=mode,
                predictions=model.predictions,
                loss=model.total_loss,
                train_op=train_op,
                eval_metrics=eval_metrics)
        else:
            estimator = tf.estimator.EstimatorSpec(
                mode=mode,
                predictions=model.predictions,
                loss=model.total_loss,
                train_op=train_op,
                eval_metric_ops=eval_metrics)

        return estimator
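
The two metric helpers reflect an API difference rather than a modeling difference: TPUEstimatorSpec.eval_metrics expects a (metric_fn, tensors) pair that is evaluated on the host CPU, while EstimatorSpec.eval_metric_ops expects a plain dict of (value_op, update_op) pairs. A hedged sketch of the two shapes for a single RMSE metric (the metrics module contents and the model.labels attribute are assumptions):

import tensorflow as tf

def create_metrics(model):
  # Dict form for EstimatorSpec.eval_metric_ops (sketch only).
  return {
      "rmse": tf.metrics.root_mean_squared_error(model.labels,
                                                 model.predictions),
  }

def create_metric_fn(model):
  # (metric_fn, tensors) form for TPUEstimatorSpec.eval_metrics (sketch only).
  def metric_fn(labels, predictions):
    return {"rmse": tf.metrics.root_mean_squared_error(labels, predictions)}
  return metric_fn, [model.labels, model.predictions]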
Example #5
  def __call__(self, features, labels, mode, params):
    """Builds the model and returns an EstimatorSpec or TPUEstimatorSpec."""
    hparams = copy.deepcopy(self._base_hparams)
    if "batch_size" in params:
      hparams.batch_size = params["batch_size"]

    # Allow labels to be passed in the features dictionary.
    if "labels" in features:
      if labels is not None and labels is not features["labels"]:
        raise ValueError(
            "Conflicting labels: features['labels'] = {}, labels = {}".format(
                features["labels"], labels))
      labels = features.pop("labels")

    model = self._model_class(features, labels, hparams, mode)
    model.build()

    # Possibly create train_op.
    use_tpu = self._use_tpu
    train_op = None
    if mode == tf.estimator.ModeKeys.TRAIN:
      learning_rate = training.create_learning_rate(hparams, model.global_step)
      optimizer = training.create_optimizer(hparams, learning_rate, use_tpu)
      train_op = training.create_train_op(model, optimizer)

    # Possibly create evaluation metrics.
    eval_metrics = None
    if mode == tf.estimator.ModeKeys.EVAL:
      eval_metrics = (
          metrics.create_metric_fn(model)
          if use_tpu else metrics.create_metrics(model))

    if use_tpu:
      estimator = tf.contrib.tpu.TPUEstimatorSpec(
          mode=mode,
          predictions=model.predictions,
          loss=model.total_loss,
          train_op=train_op,
          eval_metrics=eval_metrics)
    else:
      estimator = tf.estimator.EstimatorSpec(
          mode=mode,
          predictions=model.predictions,
          loss=model.total_loss,
          train_op=train_op,
          eval_metric_ops=eval_metrics)

    return estimator
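
Allowing labels inside the features dict lets an input_fn return a single dictionary, which is convenient when the same pipeline serves both the TPU and CPU paths. A sketch of such an input_fn (feature names and shapes are illustrative only):

import tensorflow as tf

def input_fn(params):
  # Everything, including labels, travels in one dict; the model_fn above
  # pops "labels" back out of the features.
  batch_size = params["batch_size"]
  dataset = tf.data.Dataset.from_tensor_slices({
      "time_series": tf.random_normal([1000, 201]),
      "labels": tf.zeros([1000], dtype=tf.int32),
  })
  return dataset.repeat().batch(batch_size, drop_remainder=True)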
Example #6
    def model_fn(features, labels, mode, params):
        """Builds the model and returns an EstimatorSpec or TPUEstimatorSpec."""
        # For TPUEstimator, params contains the batch size per TPU core.
        if "batch_size" in params:
            hparams.batch_size = params["batch_size"]

        # Allow labels to be passed in the features dictionary.
        if "labels" in features:
            if labels is not None and labels is not features["labels"]:
                raise ValueError(
                    "Conflicting labels: features['labels'] = %s, labels = %s"
                    % (features["labels"], labels))
            labels = features.pop("labels")

        model = model_class(features, labels, hparams, mode)
        model.build()

        # Possibly create train_op.
        train_op = None
        if mode == tf.estimator.ModeKeys.TRAIN:
            learning_rate = training.create_learning_rate(
                hparams, model.global_step)
            optimizer = training.create_optimizer(hparams, learning_rate,
                                                  use_tpu)
            train_op = training.create_train_op(model, optimizer)

        # Possibly create evaluation metrics.
        eval_metrics = None
        if mode == tf.estimator.ModeKeys.EVAL:
            eval_metrics = (metrics.create_metric_fn(model)
                            if use_tpu else metrics.create_metrics(model))

        if use_tpu:
            estimator = tf.compat.v1.estimator.tpu.TPUEstimatorSpec(
                mode=mode,
                predictions=model.predictions,
                loss=model.total_loss,
                train_op=train_op,
                eval_metrics=eval_metrics)
        else:
            estimator = tf.estimator.EstimatorSpec(
                mode=mode,
                predictions=model.predictions,
                loss=model.total_loss,
                train_op=train_op,
                eval_metric_ops=eval_metrics)

        return estimator
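
Unlike the class-based examples, model_class, hparams, and use_tpu here come from an enclosing scope. A sketch of the kind of factory that would produce this closure (the factory name and the deepcopy are assumptions; note that without a copy, the batch-size override mutates the caller's hparams):

import copy

def create_model_fn(model_class, hparams, use_tpu=False):
  # Copy so the per-core batch-size override inside model_fn does not
  # mutate the caller's hparams object.
  hparams = copy.deepcopy(hparams)

  def model_fn(features, labels, mode, params):
    ...  # body as shown in Example #6 above

  return model_fn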
Example #7
    def __call__(self, features, mode, params):
        """Builds the model and returns an EstimatorSpec or TPUEstimatorSpec."""
        hparams = copy.deepcopy(self._base_hparams)
        if "batch_size" in params:
            hparams.batch_size = params["batch_size"]

        model = self._model_class(features, hparams, mode)
        model.build()

        # Possibly create train_op.
        use_tpu = self._use_tpu
        train_op = None
        if mode == tf.estimator.ModeKeys.TRAIN:
            learning_rate = training.create_learning_rate(
                hparams, model.global_step)
            optimizer = training.create_optimizer(hparams, learning_rate,
                                                  use_tpu)
            train_op = training.create_train_op(model, optimizer)

        if use_tpu:
            estimator_spec = tf.contrib.tpu.TPUEstimatorSpec
        else:
            estimator_spec = tf.estimator.EstimatorSpec

        # Predictions are the Tensors produced by Estimator.predict(). Losses and
        # targets make sense even in predict mode because the model is
        # autoregressive.
        predictions = {
            "target": model.autoregressive_target,
            "seq_weights": model.weights,
            "seq_losses": model.batch_losses,
            "mean_loss": model.per_example_loss,
            "network_output": model.network_output,
        }
        predictions.update(model.features)
        predictions.update(model.dist_params)

        return estimator_spec(mode=mode,
                              predictions=predictions,
                              loss=model.total_loss,
                              train_op=train_op)
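
Because the model is autoregressive, losses and targets are meaningful even in PREDICT mode, so they are all surfaced through the predictions dict. Estimator.predict() then yields one dict per example with those keys; a usage sketch (predict_input_fn and the estimator object are assumed):

for prediction in estimator.predict(input_fn=predict_input_fn):
  # Each element carries the keys assembled above ("target", "seq_weights",
  # "seq_losses", "mean_loss", "network_output") plus the model's features
  # and distribution parameters.
  print(prediction["mean_loss"])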
Example #8
  def model_fn(features, labels, mode, params):
    """Builds the model and returns an EstimatorSpec or TPUEstimatorSpec."""
    # For TPUEstimator, params contains the batch size per TPU core.
    if "batch_size" in params:
      hparams.batch_size = params["batch_size"]

    model = model_class(features, labels, hparams, mode)
    model.build()

    # Possibly create train_op.
    train_op = None
    if mode == tf.estimator.ModeKeys.TRAIN:
      learning_rate = training.create_learning_rate(hparams, model.global_step)
      optimizer = training.create_optimizer(hparams, learning_rate, use_tpu)
      train_op = training.create_train_op(model, optimizer)

    # Possibly create evaluation metrics.
    eval_metrics = None
    if mode == tf.estimator.ModeKeys.EVAL:
      eval_metrics = (
          metrics.create_metric_fn(model)
          if use_tpu else metrics.create_metrics(model))

    if use_tpu:
      estimator = tf.contrib.tpu.TPUEstimatorSpec(
          mode=mode,
          predictions=model.predictions,
          loss=model.total_loss,
          train_op=train_op,
          eval_metrics=eval_metrics)
    else:
      estimator = tf.estimator.EstimatorSpec(
          mode=mode,
          predictions=model.predictions,
          loss=model.total_loss,
          train_op=train_op,
          eval_metric_ops=eval_metrics)

    return estimator
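
On the TPU path, params["batch_size"] is not supplied by the caller: TPUEstimator derives a per-core batch size from train_batch_size / eval_batch_size and injects it into params. A construction sketch for the contrib API used above (values are illustrative):

import tensorflow as tf

run_config = tf.contrib.tpu.RunConfig(
    model_dir="/tmp/model",
    tpu_config=tf.contrib.tpu.TPUConfig(iterations_per_loop=100))

estimator = tf.contrib.tpu.TPUEstimator(
    model_fn=model_fn,      # the closure defined above
    use_tpu=use_tpu,
    train_batch_size=1024,  # divided across cores; the per-core value
    eval_batch_size=1024,   # arrives in params["batch_size"]
    config=run_config,
    params={})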