Example #1
0
    def _input_fn(params=None):
        """Input_fn returning one batch built from the captured features/labels."""
        del params  # Unused.

        # Wrap the captured features in a single-element dataset and pull the
        # one tensor out through a one-shot iterator.
        feature_dataset = tf.data.Dataset.from_tensors([features])
        input_features = tf_compat.make_one_shot_iterator(
            feature_dataset).get_next()
        input_labels = None
        if labels is not None:
            label_dataset = tf.data.Dataset.from_tensors([labels])
            input_labels = tf_compat.make_one_shot_iterator(
                label_dataset).get_next()
        return {"x": input_features}, input_labels
Example #2
0
File: estimator.py  Project: todun/adanet
  def build_subnetwork(self, features, labels, logits_dimension, training,
                       iteration_step, summary, previous_ensemble):
    """Builds a `Subnetwork` by delegating to the wrapped estimator model_fn."""
    # AdaNet performs evaluation itself, so only TRAIN and PREDICT are needed.
    if training:
      mode = tf.estimator.ModeKeys.TRAIN
    else:
      mode = tf.estimator.ModeKeys.PREDICT

    # The template guarantees variables are created once and shared across
    # the calls below.
    model_fn = tf.make_template("model_fn", self._call_model_fn)
    train_features, train_labels = features, labels
    if training and self._subestimator.train_input_fn:
      # TODO: Consider tensorflow_estimator/python/estimator/util.py.
      train_inputs = self._subestimator.train_input_fn()
      if isinstance(train_inputs, (tf_compat.DatasetV1, tf_compat.DatasetV2)):
        iterator = tf_compat.make_one_shot_iterator(train_inputs)
        train_features, train_labels = iterator.get_next()
      else:
        train_features, train_labels = train_inputs

      # Build the training graph first because of dependencies on scope; the
      # ensemble-learning graph below then gets model_fn_1 for its scope.
      _, train_op = model_fn(train_features, train_labels, mode, summary)
      logits, _ = model_fn(features, labels, mode, summary)
    else:
      logits, train_op = model_fn(features, labels, mode, summary)

    # TODO: Replace with variance complexity measure.
    complexity = tf.constant(0.)
    return Subnetwork(
        logits=logits,
        last_layer=logits,
        shared={"train_op": train_op},
        complexity=complexity)
Example #3
0
    def build_subnetwork(self, features, labels, logits_dimension, training,
                         iteration_step, summary, previous_ensemble):
        """Returns a `Subnetwork` built from the wrapped estimator model_fn."""
        # AdaNet takes care of evaluation, so EVAL mode is never needed here.
        mode = (tf.estimator.ModeKeys.TRAIN
                if training else tf.estimator.ModeKeys.PREDICT)

        # Wrapping in a template makes repeated calls reuse one set of
        # variables rather than creating new ones per call.
        model_fn = tf.make_template("model_fn", self._call_model_fn)
        logits, train_op = model_fn(features, labels, mode)
        if training and self._subestimator.train_input_fn:
            # TODO: Consider tensorflow_estimator/python/estimator/util.py.
            train_inputs = self._subestimator.train_input_fn()
            if isinstance(train_inputs,
                          (tf_compat.DatasetV1, tf_compat.DatasetV2)):
                features, labels = (tf_compat.make_one_shot_iterator(
                    train_inputs).get_next())
            else:
                features, labels = train_inputs
            # Only the train op runs on the subestimator's own training set.
            _, train_op = model_fn(features, labels, mode)

        self._subnetwork_train_op = train_op

        # TODO: Replace with variance complexity measure.
        complexity = tf.constant(0.)
        return Subnetwork(logits=logits,
                          last_layer=logits,
                          persisted_tensors={},
                          complexity=complexity)
Example #4
0
    def build_subnetwork(self, features, labels, logits_dimension, training,
                         iteration_step, summary, previous_ensemble):
        """Builds a `Subnetwork` backed by the wrapped estimator's model_fn.

        Invokes `self._call_model_fn` through a `tf.compat.v1.make_template`
        so that the (up to two) calls below create variables once and reuse
        them. `_call_model_fn` is expected to return a 4-tuple of
        (logits, last_layer, train_op_spec, local_init_op) — inferred from the
        unpacking below; confirm against its definition.

        Args:
            features: Features forwarded to the model_fn.
            labels: Labels forwarded to the model_fn.
            logits_dimension: Unused here; part of the AdaNet builder API.
            training: Whether a TRAIN-mode graph should be built.
            iteration_step: Unused here; part of the AdaNet builder API.
            summary: Summary object forwarded to the model_fn.
            previous_ensemble: Unused here; part of the AdaNet builder API.

        Returns:
            A `Subnetwork` carrying the train op spec in `shared["train_op"]`
            and any collected local-init ops in `local_init_ops`.
        """
        # We don't need an EVAL mode since AdaNet takes care of evaluation for us.
        mode = tf.estimator.ModeKeys.PREDICT
        if training:
            mode = tf.estimator.ModeKeys.TRAIN

        # Call in template to ensure that variables are created once and reused.
        call_model_fn_template = tf.compat.v1.make_template(
            "model_fn", self._call_model_fn)
        subestimator_features, subestimator_labels = features, labels
        local_init_ops = []
        if training and self._subestimator.train_input_fn:
            # TODO: Consider tensorflow_estimator/python/estimator/util.py.
            inputs = self._subestimator.train_input_fn()
            if isinstance(inputs, (tf_compat.DatasetV1, tf_compat.DatasetV2)):
                # Datasets are drained through a one-shot iterator; any other
                # input_fn return value is assumed to already be a
                # (features, labels) pair.
                subestimator_features, subestimator_labels = (
                    tf_compat.make_one_shot_iterator(inputs).get_next())
            else:
                subestimator_features, subestimator_labels = inputs

            # Construct subnetwork graph first because of dependencies on scope.
            _, _, bagging_train_op_spec, sub_local_init_op = call_model_fn_template(
                subestimator_features, subestimator_labels, mode, summary)
            # Graph for ensemble learning gets model_fn_1 for scope.
            logits, last_layer, _, ensemble_local_init_op = call_model_fn_template(
                features, labels, mode, summary)

            # Keep only the init ops that were actually returned.
            if sub_local_init_op:
                local_init_ops.append(sub_local_init_op)
            if ensemble_local_init_op:
                local_init_ops.append(ensemble_local_init_op)

            # Run train op in a hook so that exceptions can be intercepted by the
            # AdaNet framework instead of the Estimator's monitored training session.
            hooks = bagging_train_op_spec.hooks + (_SecondaryTrainOpRunnerHook(
                bagging_train_op_spec.train_op), )
            # The real training happens in the hook above, so the spec's own
            # train_op is a no-op placeholder.
            train_op_spec = TrainOpSpec(
                train_op=tf.no_op(),
                chief_hooks=bagging_train_op_spec.chief_hooks,
                hooks=hooks)
        else:
            logits, last_layer, train_op_spec, local_init_op = call_model_fn_template(
                features, labels, mode, summary)
            if local_init_op:
                local_init_ops.append(local_init_op)

        # TODO: Replace with variance complexity measure.
        complexity = tf.constant(0.)
        return Subnetwork(logits=logits,
                          last_layer=last_layer,
                          shared={"train_op": train_op_spec},
                          complexity=complexity,
                          local_init_ops=local_init_ops)