Example 1
 def model_fn(features, labels, mode):
     _ = labels
     step = training.get_global_step()
     w = variable_scope.get_variable(
         'w',
         shape=[],
         initializer=init_ops.zeros_initializer(),
         dtype=dtypes.int64)
     if estimator_lib.ModeKeys.TRAIN == mode:
         # To consume features, add a control dependency on them.
         with ops.control_dependencies([features]):
             step_inc = state_ops.assign_add(training.get_global_step(),
                                             1)
         with ops.control_dependencies([step_inc]):
             assign_w_to_step_plus_2 = w.assign(step + 2)
         return estimator_lib.EstimatorSpec(
             mode,
             loss=constant_op.constant(3.),
             train_op=assign_w_to_step_plus_2)
     if estimator_lib.ModeKeys.EVAL == mode:
         # To consume features, add a control dependency on them.
         with ops.control_dependencies([features]):
             loss = constant_op.constant(5.)
         return estimator_lib.EstimatorSpec(
             mode,
             loss=loss,
             # w is constant within each step, so the mean is just its value:
             # w = 0 if step == 0 else step + 2.
             eval_metric_ops={'mean_of_const': metrics_lib.mean(w)})
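All of these snippets plug into the same Estimator API. A minimal usage sketch for the model_fn above (TF 1.x; the trivial input_fn is an assumption, not part of the example):

import tensorflow as tf

def input_fn():
    # Hypothetical input: a constant feature tensor; labels are unused.
    return tf.constant([[1.0]]), None

est = tf.estimator.Estimator(model_fn=model_fn)
est.train(input_fn=input_fn, steps=5)            # runs train_op five times
print(est.evaluate(input_fn=input_fn, steps=1))  # reports 'mean_of_const'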
Example 2
 def _model_fn(features, labels, mode):
     if mode == estimator.ModeKeys.TRAIN:
         loss, accuracy, var_list, hooks = model_fn[mode](features,
                                                          labels,
                                                          run_config)
         # Learning rate
         # TODO: organize learning-rate and optimizer configuration.
         learning_rate = run_config.learning_rate
         if run_config.scheduler == 'exponential':
             learning_rate = tf.train.exponential_decay(
                 learning_rate=learning_rate,
                 global_step=tf.train.get_or_create_global_step(),
                 decay_steps=run_config.decay_steps,
                 decay_rate=run_config.decay_rate,
                 staircase=run_config.staircase)
         elif run_config.scheduler == 'step':
             learning_rate = step_lr(boundaries=run_config.boundaries,
                                     values=run_config.lr_values)
         else:
             learning_rate = tf.constant(learning_rate,
                                         dtype=tf.float32)
         tf.summary.scalar('lr', learning_rate)
         # Optimizer
         optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate)
         # Hook
         hooks += [
             LoggerHook(learning_rate=learning_rate,
                        log_frequency=run_config.log_frequency,
                        batch_size=run_config.batch_size,
                        loss=loss,
                        accuracy=accuracy,
                        metric_names=run_config.class_names)
         ]
         if hasattr(run_config, 'lr_multiplier'):
             train_op = multi_lr(optimizer, loss, var_list,
                                 run_config.lr_multiplier)
         else:
             train_op = optimizer.minimize(
                 loss,
                 global_step=tf.train.get_global_step(),
                 var_list=var_list)
         return estimator.EstimatorSpec(estimator.ModeKeys.TRAIN,
                                        loss=loss,
                                        training_hooks=hooks,
                                        train_op=train_op)
     elif mode == estimator.ModeKeys.EVAL:
         loss, metrics = model_fn[mode](features, labels, run_config)
         return estimator.EstimatorSpec(estimator.ModeKeys.EVAL,
                                        loss=loss,
                                        eval_metric_ops=metrics)
     elif mode == estimator.ModeKeys.PREDICT:
         predictions = model_fn[mode](features, run_config)
         return estimator.EstimatorSpec(estimator.ModeKeys.PREDICT,
                                        predictions)
     else:
         raise ValueError("Expect mode in [train, eval, infer],"
                          "but received {}".format(mode))
Example 3
 def model_fn(features, labels, mode):
     _ = labels
     if estimator_lib.ModeKeys.TRAIN == mode:
         with ops.control_dependencies([features]):
             train_op = state_ops.assign_add(training.get_global_step(),
                                             1)
         return estimator_lib.EstimatorSpec(
             mode, loss=constant_op.constant(3.), train_op=train_op)
     if estimator_lib.ModeKeys.EVAL == mode:
         return estimator_lib.EstimatorSpec(
             mode,
             loss=constant_op.constant(5.),
             eval_metric_ops={
                 'mean_of_features': metrics_lib.mean(features)
             })
Example 4
 def predict_spec(self, predictions, params):
     named_predictions = {
         'probabilities': predictions,
         'top_1': tf.argmax(predictions, axis=1)
     }
     return estimator.EstimatorSpec(estimator.ModeKeys.PREDICT,
                                    predictions=named_predictions)
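Once wired into an Estimator, Estimator.predict yields one dict per input example with the keys named above. A hedged sketch (est and predict_input_fn are assumptions):

for pred in est.predict(input_fn=predict_input_fn):
    print(pred['top_1'], pred['probabilities'])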
Example 5
 def _serving_ops(self, features):
     """Add ops for serving to the graph."""
     with variable_scope.variable_scope("model", use_resource=True):
         prediction_outputs = self.model.predict(features=features)
     with variable_scope.variable_scope("model", reuse=True):
         filtering_outputs = self.create_loss(features,
                                              estimator_lib.ModeKeys.EVAL)
     with variable_scope.variable_scope("model", reuse=True):
         no_state_features = {
             k: v
             for k, v in features.items()
             if not k.startswith(feature_keys.State.STATE_PREFIX)
         }
         # Ignore any state management when cold-starting. The model's default
         # start state is replicated across the batch.
         cold_filtering_outputs = self.model.define_loss(
             features=no_state_features, mode=estimator_lib.ModeKeys.EVAL)
     return estimator_lib.EstimatorSpec(
         mode=estimator_lib.ModeKeys.PREDICT,
         export_outputs={
             feature_keys.SavedModelLabels.PREDICT:
             export_lib.PredictOutput(prediction_outputs),
             feature_keys.SavedModelLabels.FILTER:
             export_lib.PredictOutput(
                 state_to_dictionary(filtering_outputs.end_state)),
             feature_keys.SavedModelLabels.COLD_START_FILTER:
             _NoStatePredictOutput(
                 state_to_dictionary(cold_filtering_outputs.end_state))
         },
         # Likely unused, but it is necessary to return `predictions` to satisfy
         # the Estimator's error checking.
         predictions={})
Example 6
 def _evaluate_ops(self, features):
     """Add ops for evaluation (aka filtering) to the graph."""
     mode = estimator_lib.ModeKeys.EVAL
     with variable_scope.variable_scope("model", use_resource=True):
         model_outputs = self.create_loss(features, mode)
     metrics = {}
     # Just output in-sample predictions for the last chunk seen
     for prediction_key, prediction_value in (
             model_outputs.predictions.items()):
         metrics[prediction_key] = _identity_metric_single(
             prediction_key, prediction_value)
     metrics[feature_keys.FilteringResults.TIMES] = _identity_metric_single(
         feature_keys.FilteringResults.TIMES,
         model_outputs.prediction_times)
     metrics[feature_keys.FilteringResults.STATE_TUPLE] = (
         _identity_metric_nested(feature_keys.FilteringResults.STATE_TUPLE,
                                 model_outputs.end_state))
     metrics[metric_keys.MetricKeys.LOSS_MEAN] = metrics_impl.mean(
         model_outputs.loss, name="average_loss")
     return estimator_lib.EstimatorSpec(
         loss=model_outputs.loss,
         mode=mode,
         eval_metric_ops=metrics,
         # needed for custom metrics.
         predictions=model_outputs.predictions)
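Examples 6 and 21 rely on an _identity_metric_single helper that is not shown. A minimal sketch (an assumption, not the original helper): report a tensor's last evaluated value by caching it in a local variable, satisfying the (value_op, update_op) contract of eval_metric_ops:

import tensorflow as tf

def _identity_metric_single(name, input_tensor):
    # Local variable updated once per evaluation step; its final value is
    # what the Estimator reports for this "metric".
    holder = tf.Variable(
        initial_value=tf.zeros([], dtype=input_tensor.dtype),
        trainable=False,
        collections=[tf.GraphKeys.LOCAL_VARIABLES],
        validate_shape=False,
        name='{}_last_value'.format(name))
    update_op = tf.assign(holder, input_tensor, validate_shape=False)
    return holder.read_value(), update_op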
Example 7
 def _serving_ops(self, features):
     """Add ops for serving to the graph."""
     with variable_scope.variable_scope("model", use_resource=True):
         filtering_features = {}
         prediction_features = {}
         values_length = array_ops.shape(
             features[feature_keys.FilteringFeatures.VALUES])[1]
         for key, value in features.items():
             if key == feature_keys.State.STATE_TUPLE:
                 # Ignore state input. The model's default start state is replicated
                 # across the batch.
                 continue
             if key == feature_keys.FilteringFeatures.VALUES:
                 filtering_features[key] = value
             else:
                 filtering_features[key] = value[:, :values_length]
                 prediction_features[key] = value[:, values_length:]
         cold_filtering_outputs = self.model.define_loss(
             features=filtering_features, mode=estimator_lib.ModeKeys.EVAL)
         prediction_features[feature_keys.State.STATE_TUPLE] = (
             cold_filtering_outputs.end_state)
     with variable_scope.variable_scope("model", reuse=True):
         prediction_outputs = self.model.predict(
             features=prediction_features)
     return estimator_lib.EstimatorSpec(
         mode=estimator_lib.ModeKeys.PREDICT,
         export_outputs={
             feature_keys.SavedModelLabels.PREDICT:
             _NoStatePredictOutput(prediction_outputs),
         },
         # Likely unused, but it is necessary to return `predictions` to satisfy
         # the Estimator's error checking.
         predictions={})
Example 8
 def model_fn(features, mode):
     del features
     global_step = training.get_global_step()
     return estimator_lib.EstimatorSpec(
         mode,
         loss=constant_op.constant([5.]),
         predictions={'x': constant_op.constant([5.])},
         train_op=global_step.assign_add(1))
Example 9
 def _predict_ops(self, features):
     """Add ops for prediction to the graph."""
     with variable_scope.variable_scope("model", use_resource=True):
         prediction = self.model.predict(features=features)
     prediction[feature_keys.PredictionResults.TIMES] = features[
         feature_keys.PredictionFeatures.TIMES]
     return estimator_lib.EstimatorSpec(predictions=prediction,
                                        mode=estimator_lib.ModeKeys.PREDICT)
Example 10
def model_fn(features, labels, mode, params):
    """The model_fn argument for creating an Estimator."""
    model = Model(params["data_format"])
    image = features
    if isinstance(image, dict):
        image = features["image"]

    if mode == estimator.ModeKeys.PREDICT:
        logits = model(image, training=False)
        predictions = {
            "classes": math_ops.argmax(logits, axis=1),
            "probabilities": nn.softmax(logits),
        }
        return estimator.EstimatorSpec(
            mode=estimator.ModeKeys.PREDICT,
            predictions=predictions,
            export_outputs={
                "classify": estimator.export.PredictOutput(predictions)
            })

    elif mode == estimator.ModeKeys.TRAIN:
        optimizer = train.AdamOptimizer(learning_rate=1e-4)

        logits = model(image, training=True)
        loss = losses.sparse_softmax_cross_entropy(labels=labels,
                                                   logits=logits)
        return estimator.EstimatorSpec(mode=estimator.ModeKeys.TRAIN,
                                       loss=loss,
                                       train_op=optimizer.minimize(
                                           loss,
                                           train.get_or_create_global_step()))

    elif mode == estimator.ModeKeys.EVAL:
        logits = model(image, training=False)
        loss = losses.sparse_softmax_cross_entropy(labels=labels,
                                                   logits=logits)
        return estimator.EstimatorSpec(
            mode=estimator.ModeKeys.EVAL,
            loss=loss,
            eval_metric_ops={
                "accuracy":
                ops.metrics.accuracy(labels=labels,
                                     predictions=math_ops.argmax(logits,
                                                                 axis=1)),
            })
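A minimal construction sketch for the model_fn above (the data_format value is an assumption):

mnist_classifier = estimator.Estimator(
    model_fn=model_fn, params={'data_format': 'channels_last'})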
Example 11
 def model_fn(features, labels, mode):
     _, _ = features, labels
     return estimator_lib.EstimatorSpec(
         mode,
         loss=constant_op.constant(3.),
         scaffold=training.Scaffold(saver=training.Saver()),
         train_op=constant_op.constant(5.),
         eval_metric_ops={
             'mean_of_features':
             metrics_lib.mean(constant_op.constant(2.))
         })
Example 12
    def eval_spec(self, loss, predictions, labels, params):
        # Define the metrics:
        metrics_dict = {
            'Accuracy':
            tf.metrics.accuracy(labels=tf.argmax(labels, axis=-1),
                                predictions=tf.argmax(predictions, axis=-1))
        }

        # return eval spec
        return estimator.EstimatorSpec(estimator.ModeKeys.EVAL,
                                       loss=loss,
                                       eval_metric_ops=metrics_dict)
Example 13
 def _train_ops(self, features):
     """Add training ops to the graph."""
     mode = estimator_lib.ModeKeys.TRAIN
     with variable_scope.variable_scope("model"):
         model_outputs = self.create_loss(features, mode)
     train_op = optimizers.optimize_loss(
         model_outputs.loss,
         global_step=training_util.get_global_step(),
         optimizer=self.optimizer,
         # Learning rate is set in the Optimizer object
         learning_rate=None)
     return estimator_lib.EstimatorSpec(loss=model_outputs.loss,
                                        mode=mode,
                                        train_op=train_op)
Example 14
    def _train_ops(self, features):
        """Add training ops to the graph."""
        mode = estimator_lib.ModeKeys.TRAIN
        with variable_scope.variable_scope(
                "model",
                # Use ResourceVariables to avoid race conditions.
                use_resource=True):
            model_outputs = self.create_loss(features, mode)

        train_op = self.optimizer.minimize(
            model_outputs.loss, global_step=training_util.get_global_step())
        return estimator_lib.EstimatorSpec(loss=model_outputs.loss,
                                           mode=mode,
                                           train_op=train_op)
Example 15
 def _train(features):
     """Add training ops to the graph."""
     with variable_scope.variable_scope("model"):
         model_outputs = state_manager.define_loss(
             model, features, estimator_lib.ModeKeys.TRAIN)
     train_op = optimizers.optimize_loss(
         model_outputs.loss,
         global_step=variables.get_global_step(),
         optimizer=optimizer,
         # Learning rate is set in the Optimizer object
         learning_rate=None)
     return estimator_lib.EstimatorSpec(loss=model_outputs.loss,
                                        mode=estimator_lib.ModeKeys.TRAIN,
                                        train_op=train_op)
Example 16
        def model_fn(features, labels, mode):
            _, _ = features, labels

            def init_fn(scaffold, session):
                _, _ = scaffold, session

            return estimator_lib.EstimatorSpec(
                mode,
                loss=constant_op.constant(3.),
                scaffold=training.Scaffold(init_fn=init_fn),
                train_op=constant_op.constant(5.),
                eval_metric_ops={
                    'mean_of_features':
                    metrics_lib.mean(constant_op.constant(2.))
                })
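The no-op init_fn above only exercises the Scaffold hook point. A more typical init_fn restores pretrained weights once the session exists (the checkpoint path is a placeholder assumption):

def init_fn(scaffold, session):
    scaffold.saver.restore(session, '/tmp/pretrained/model.ckpt')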
Example 17
 def model_fn(features, labels, mode):
     _, _ = features, labels
     w = variables.Variable(
         initial_value=[0.],
         trainable=False,
         collections=[ops.GraphKeys.SAVEABLE_OBJECTS])
     init_op = control_flow_ops.group(
         [w.initializer,
          training.get_global_step().initializer])
     return estimator_lib.EstimatorSpec(
         mode,
         loss=constant_op.constant(3.),
         scaffold=training.Scaffold(init_op=init_op),
         train_op=constant_op.constant(5.),
         eval_metric_ops={
             'mean_of_features':
             metrics_lib.mean(constant_op.constant(2.))
         })
Example 18
 def _serving(features):
     with variable_scope.variable_scope("model"):
         prediction_outputs = model.predict(features=features)
     with variable_scope.variable_scope("model", reuse=True):
         filtering_outputs = state_manager.define_loss(
             model, features, estimator_lib.ModeKeys.EVAL)
     return estimator_lib.EstimatorSpec(
         mode=estimator_lib.ModeKeys.PREDICT,
         export_outputs={
             feature_keys.SavedModelLabels.PREDICT:
             export_lib.PredictOutput(prediction_outputs),
             feature_keys.SavedModelLabels.FILTER:
             export_lib.PredictOutput(
                 state_to_dictionary(filtering_outputs.end_state))
         },
         # Likely unused, but it is necessary to return `predictions` to satisfy
         # the Estimator's error checking.
         predictions={})
Example 19
 def _serving_ops(self, features):
   """Add ops for serving to the graph."""
   with variable_scope.variable_scope("model", use_resource=True):
     prediction_outputs = self.model.predict(features=features)
   with variable_scope.variable_scope("model", reuse=True):
     filtering_outputs = self.create_loss(
         features, estimator_lib.ModeKeys.EVAL)
   return estimator_lib.EstimatorSpec(
       mode=estimator_lib.ModeKeys.PREDICT,
       export_outputs={
           feature_keys.SavedModelLabels.PREDICT:
               export_lib.PredictOutput(prediction_outputs),
           feature_keys.SavedModelLabels.FILTER:
               export_lib.PredictOutput(
                   state_to_dictionary(filtering_outputs.end_state))
       },
       # Likely unused, but it is necessary to return `predictions` to satisfy
       # the Estimator's error checking.
       predictions={})
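The export_outputs in these serving examples only take effect when the Estimator is exported as a SavedModel. A hedged sketch (the feature name, shapes, and export directory are assumptions):

import tensorflow as tf

def serving_input_fn():
    # Placeholders fed at serving time; handed to the model_fn as features.
    inputs = {'values': tf.placeholder(tf.float32, [None, None, 1])}
    return tf.estimator.export.ServingInputReceiver(inputs, inputs)

est.export_savedmodel('/tmp/exported_model', serving_input_fn)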
Example 20
  def _train_ops(self, features):
    """Add training ops to the graph."""
    with variable_scope.variable_scope(
        "model",
        # Use ResourceVariables to avoid race conditions.
        use_resource=True):
      model_outputs = self.state_manager.define_loss(
          self.model, features, estimator_lib.ModeKeys.TRAIN)

    train_op = optimizers.optimize_loss(
        model_outputs.loss,
        global_step=training_util.get_global_step(),
        optimizer=self.optimizer,
        # Learning rate is set in the Optimizer object
        learning_rate=None)
    return estimator_lib.EstimatorSpec(
        loss=model_outputs.loss,
        mode=estimator_lib.ModeKeys.TRAIN,
        train_op=train_op)
Example 21
 def _evaluate_ops(self, features):
   """Add ops for evaluation (aka filtering) to the graph."""
   with variable_scope.variable_scope("model", use_resource=True):
     model_outputs = self.state_manager.define_loss(
         self.model, features, estimator_lib.ModeKeys.EVAL)
   metrics = {}
   # Just output in-sample predictions for the last chunk seen
   for prediction_key, prediction_value in model_outputs.predictions.items():
     metrics[prediction_key] = _identity_metric_single(prediction_key,
                                                       prediction_value)
   metrics[feature_keys.FilteringResults.TIMES] = _identity_metric_single(
       feature_keys.FilteringResults.TIMES, model_outputs.prediction_times)
   metrics[feature_keys.FilteringResults.STATE_TUPLE] = (
       _identity_metric_nested(feature_keys.FilteringResults.STATE_TUPLE,
                               model_outputs.end_state))
   return estimator_lib.EstimatorSpec(
       loss=model_outputs.loss,
       mode=estimator_lib.ModeKeys.EVAL,
       eval_metric_ops=metrics,
       predictions={})
Example 22
 def train_spec(self, loss, params):
     train_op = self._training_op(loss, params)
     return estimator.EstimatorSpec(estimator.ModeKeys.TRAIN,
                                    loss=loss,
                                    train_op=train_op)