Example #1
def _get_input_fn(x,
                  y,
                  input_fn,
                  feed_fn,
                  batch_size,
                  shuffle=False,
                  epochs=1):
    """Make inputs into input and feed functions."""
    if input_fn is None:
        if x is None:
            raise ValueError('Either x or input_fn must be provided.')

        if contrib_framework.is_tensor(x) or (y is not None and
                                              contrib_framework.is_tensor(y)):
            raise ValueError(
                'Inputs cannot be tensors. Please provide input_fn.')

        df = data_feeder.setup_train_data_feeder(x,
                                                 y,
                                                 n_classes=None,
                                                 batch_size=batch_size,
                                                 shuffle=shuffle,
                                                 epochs=epochs)
        return df.input_builder, df.get_feed_dict_fn()

    if (x is not None) or (y is not None):
        raise ValueError('Can not provide both input_fn and x or y.')
    if batch_size is not None:
        raise ValueError('Can not provide both input_fn and batch_size.')

    return input_fn, feed_fn
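
A minimal usage sketch (not part of the original source): building the input/feed pair from in-memory NumPy arrays. It assumes the deprecated tf.contrib.learn modules behind `data_feeder` are importable; the shapes, dtypes, and batch size below are illustrative.

import numpy as np

# Illustrative training data: 100 samples with 4 features and one target each.
x_train = np.random.rand(100, 4).astype(np.float32)
y_train = np.random.rand(100, 1).astype(np.float32)

# With plain arrays, the helper builds a data feeder and returns its
# input builder plus the matching feed-dict function.
input_builder, feed_fn = _get_input_fn(
    x_train, y_train, input_fn=None, feed_fn=None, batch_size=32,
    shuffle=True, epochs=1)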
Example #2
    def fit(self, x, y=None, monitors=None, logdir=None, steps=None):
        """Trains a k-means clustering on x.

    Note: See TensorFlowEstimator for logic for continuous training and graph
      construction across multiple calls to fit.

    Args:
      x: training input matrix of shape [n_samples, n_features].
      y: labels. Should be None.
      monitors: Monitor object to print training progress and invoke early
        stopping
      logdir: the directory to save the log file that can be used for optional
        visualization.
      steps: number of training steps. If not None, overrides the value passed
        in constructor.

    Returns:
      Returns self.
    """
        assert y is None
        if logdir is not None:
            self._model_dir = logdir
        self._data_feeder = data_feeder.setup_train_data_feeder(
            x, None, self._num_clusters, self.batch_size)
        self._train_model(
            input_fn=self._data_feeder.input_builder,
            feed_fn=self._data_feeder.get_feed_dict_fn(),
            steps=steps or self.steps,
            monitors=monitors,
            init_feed_fn=self._data_feeder.get_feed_dict_fn(),
        )
        return self
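
A hedged usage sketch: `kmeans` stands in for an instance of the clustering estimator this `fit` belongs to (the class itself is not shown in the excerpt); the data, step count, and log directory are illustrative.

import numpy as np

points = np.random.rand(1000, 2).astype(np.float32)  # 2-D points to cluster

# `kmeans` is assumed to be an already constructed clustering estimator that
# exposes the fit() above; y must stay None, and steps overrides the
# constructor value for this call only.
kmeans.fit(points, y=None, logdir='/tmp/kmeans_logs', steps=200)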
Example #3
def _get_input_fn(x, y, input_fn, feed_fn, batch_size, shuffle=False, epochs=1):
  """Make inputs into input and feed functions."""
  if input_fn is None:
    if x is None:
      raise ValueError('Either x or input_fn must be provided.')

    if contrib_framework.is_tensor(x) or (y is not None and
                                          contrib_framework.is_tensor(y)):
      raise ValueError('Inputs cannot be tensors. Please provide input_fn.')

    if feed_fn is not None:
      raise ValueError('Can not provide both feed_fn and x or y.')

    df = data_feeder.setup_train_data_feeder(x, y, n_classes=None,
                                             batch_size=batch_size,
                                             shuffle=shuffle,
                                             epochs=epochs)
    return df.input_builder, df.get_feed_dict_fn()

  if (x is not None) or (y is not None):
    raise ValueError('Can not provide both input_fn and x or y.')
  if batch_size is not None:
    raise ValueError('Can not provide both input_fn and batch_size.')

  return input_fn, feed_fn
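
A small sketch of the mutual-exclusion checks above: supplying both `x` and `input_fn` is rejected before any data feeder is built, so the snippet below only needs NumPy and the function itself. `my_input_fn` is a hypothetical placeholder.

import numpy as np

def my_input_fn():
    # Hypothetical input_fn; it is never called because validation fails first.
    return None, None

try:
    _get_input_fn(np.zeros((10, 2)), None, input_fn=my_input_fn,
                  feed_fn=None, batch_size=None)
except ValueError as err:
    print(err)  # "Can not provide both input_fn and x or y."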
Example #4
    def fit(self, x, y=None, monitors=None, logdir=None, steps=None):
        """Trains a k-means clustering on x.

    Note: See TensorFlowEstimator for logic for continuous training and graph
      construction across multiple calls to fit.

    Args:
      x: training input matrix of shape [n_samples, n_features].
      y: labels. Should be None.
      monitors: Monitor object to print training progress and invoke early
        stopping
      logdir: the directory to save the log file that can be used for optional
        visualization.
      steps: number of training steps. If not None, overrides the value passed
        in constructor.

    Returns:
      Returns self.
    """
        assert y is None
        if logdir is not None:
            self._model_dir = logdir
        self._data_feeder = data_feeder.setup_train_data_feeder(
            x, None, self._num_clusters, self.batch_size)
        self._train_model(input_fn=self._data_feeder.input_builder,
                          feed_fn=self._data_feeder.get_feed_dict_fn(),
                          steps=steps or self.steps,
                          monitors=monitors,
                          init_feed_fn=self._data_feeder.get_feed_dict_fn())
        return self
Example #5
 def predict(self,
             x=None,
             input_fn=None,
             batch_size=None,
             outputs=None,
             axis=1):
     """Predict class or regression for `x`."""
     if x is not None:
         predict_data_feeder = setup_train_data_feeder(
             x,
             None,
             n_classes=None,
             batch_size=batch_size or self.batch_size,
             shuffle=False,
             epochs=1)
         result = super(DeprecatedMixin, self)._infer_model(
             input_fn=predict_data_feeder.input_builder,
             feed_fn=predict_data_feeder.get_feed_dict_fn(),
             outputs=outputs)
     else:
         result = super(DeprecatedMixin, self)._infer_model(
             input_fn=input_fn, outputs=outputs)
     if self.__deprecated_n_classes > 1 and axis is not None:
         return np.argmax(result, axis)
     return result
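
A hedged usage sketch: `clf` stands in for an estimator that mixes in the class above (its concrete name is not shown here). With the default `axis=1` and more than one class, the result is the argmax of the per-class scores.

import numpy as np

x_new = np.random.rand(5, 8).astype(np.float32)  # illustrative feature rows

# `clf` is assumed to be a fitted estimator using the predict() above; the
# batch_size argument falls back to clf.batch_size when left as None.
class_ids = clf.predict(x_new, batch_size=2)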
Example #6
 def __init__(self, val_X, val_y, n_classes=0, print_steps=100,
              early_stopping_rounds=None):
     super(ValidationMonitor, self).__init__(print_steps=print_steps,
                                             early_stopping_rounds=early_stopping_rounds)
     self.val_feeder = setup_train_data_feeder(val_X, val_y, n_classes, -1)
     self.print_val_loss_buffer = []
     self.all_val_loss_buffer = []
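
A hedged construction sketch for this monitor: a held-out validation split gets its own data feeder, created with batch size -1 exactly as in the `__init__` above. The data and class count are illustrative.

import numpy as np

val_X = np.random.rand(50, 4).astype(np.float32)   # held-out features
val_y = np.random.randint(0, 3, size=50)           # held-out labels, 3 classes

val_monitor = ValidationMonitor(val_X, val_y, n_classes=3,
                                print_steps=50, early_stopping_rounds=10)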
Example #7
  def fit(self, x, y, steps=None, monitors=None, logdir=None):
    """Neural network model from provided `model_fn` and training data.

    Note: called first time constructs the graph and initializers
    variables. Consecutives times it will continue training the same model.
    This logic follows partial_fit() interface in scikit-learn.
    To restart learning, create new estimator.

    Args:
      x: matrix or tensor of shape [n_samples, n_features...]. Can be
      iterator that returns arrays of features. The training input
      samples for fitting the model.
      y: vector or matrix [n_samples] or [n_samples, n_outputs]. Can be
      iterator that returns array of targets. The training target values
      (class labels in classification, real numbers in regression).
      steps: int, number of steps to train.
             If None or 0, train for `self.steps`.
      monitors: List of `BaseMonitor` objects to print training progress and
        invoke early stopping.
      logdir: the directory to save the log file that can be used for
      optional visualization.

    Returns:
      Returns self.
    """
    if logdir is not None:
      self._model_dir = logdir
    self._data_feeder = setup_train_data_feeder(
        x, y, n_classes=self.n_classes, batch_size=self.batch_size)
    self._train_model(input_fn=self._data_feeder.input_builder,
                      feed_fn=self._data_feeder.get_feed_dict_fn(),
                      steps=steps or self.steps,
                      monitors=monitors)
    return self
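
A standalone sketch of the `steps or self.steps` fallback used in the call above: both `None` and `0` defer to the value given at construction time, which is why the docstring treats them the same. The helper below is hypothetical and only mirrors that expression.

def resolve_steps(steps, default_steps):
    # Hypothetical helper mirroring the `steps or self.steps` expression.
    return steps or default_steps

print(resolve_steps(None, 200))  # 200: fall back to the constructor value
print(resolve_steps(0, 200))     # 200: zero also falls back
print(resolve_steps(50, 200))    # 50: an explicit count wins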
Example #8
def _get_predict_input_fn(x, y, batch_size):
    df = data_feeder.setup_train_data_feeder(x,
                                             y,
                                             n_classes=None,
                                             batch_size=batch_size,
                                             shuffle=False,
                                             epochs=1)
    return df.input_builder, df.get_feed_dict_fn()
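
A minimal usage sketch, again assuming the deprecated `data_feeder` module is importable: prediction inputs are deliberately fed without shuffling and for a single epoch, so every row is seen exactly once and in order. The data and batch size are illustrative.

import numpy as np

x_new = np.random.rand(16, 4).astype(np.float32)  # illustrative inputs

predict_input_builder, predict_feed_fn = _get_predict_input_fn(
    x_new, None, batch_size=8)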
Example #9
 def evaluate(self, x=None, y=None, input_fn=None, steps=None):
     """See base class."""
     feed_fn = None
     if x is not None:
         eval_data_feeder = setup_train_data_feeder(
             x, y, n_classes=self.n_classes, batch_size=self.batch_size,
             epochs=1)
         input_fn, feed_fn = (eval_data_feeder.input_builder,
                              eval_data_feeder.get_feed_dict_fn())
     return self._evaluate_model(input_fn=input_fn, feed_fn=feed_fn, steps=steps or self.steps)
Example #10
 def evaluate(self, x=None, y=None, input_fn=None, steps=None):
   """See base class."""
   feed_fn = None
   if x is not None:
     eval_data_feeder = setup_train_data_feeder(
         x, y, n_classes=self.n_classes, batch_size=self.batch_size, epochs=1)
     input_fn, feed_fn = (eval_data_feeder.input_builder,
                          eval_data_feeder.get_feed_dict_fn())
   return self._evaluate_model(
       input_fn=input_fn, feed_fn=feed_fn, steps=steps or self.steps)
Example #11
def _get_predict_input_fn(x, batch_size):
  # TODO(ipoloshukin): Remove this when refactor of data_feeder is done
  if hasattr(x, 'create_graph'):
    def input_fn():
      return x.create_graph()
    return input_fn, None

  df = data_feeder.setup_train_data_feeder(x, None,
                                           n_classes=None,
                                           batch_size=batch_size)
  return df.input_builder, df.get_feed_dict_fn()
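
A sketch of the `create_graph` fast path above: any object exposing a `create_graph()` method bypasses the data feeder entirely, and no feed function is returned. `GraphSource` is a hypothetical stand-in for such an object.

class GraphSource(object):
    # Hypothetical stand-in for an input pipeline that builds its own graph ops.
    def create_graph(self):
        return None  # a real pipeline would return its input tensors here

input_fn, feed_fn = _get_predict_input_fn(GraphSource(), batch_size=32)
assert feed_fn is None   # graph-based inputs need no feed dict
input_fn()               # simply delegates to GraphSource.create_graph()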
Example #12
def _get_predict_input_fn(x, batch_size):
  # TODO(ipoloshukin): Remove this when refactor of data_feeder is done
  if hasattr(x, 'create_graph'):
    def input_fn():
      return x.create_graph()
    return input_fn, None

  df = data_feeder.setup_train_data_feeder(x, None,
                                           n_classes=None,
                                           batch_size=batch_size, epochs=1)
  return df.input_builder, df.get_feed_dict_fn()
Example #13
 def predict(self, x=None, input_fn=None, batch_size=None, outputs=None,
             axis=1):
   if x is not None:
     predict_data_feeder = setup_train_data_feeder(
         x, None, n_classes=None,
         batch_size=batch_size or self.batch_size,
         shuffle=False, epochs=1)
     result = super(DeprecatedMixin, self)._infer_model(
       input_fn=predict_data_feeder.input_builder,
       feed_fn=predict_data_feeder.get_feed_dict_fn(),
       outputs=outputs)
   else:
      result = super(DeprecatedMixin, self)._infer_model(
          input_fn=input_fn, outputs=outputs)
   if self.__deprecated_n_classes > 1 and axis is not None:
     return np.argmax(result, axis)
   return result
Example #14
    def _predict(self, x, axis=-1, batch_size=None):
        if self._graph is None:
            raise NotFittedError()
        # Use the batch size for fitting if the user did not specify one.
        if batch_size is None:
            batch_size = self.batch_size

        predict_data_feeder = setup_train_data_feeder(
            x, None, n_classes=None, batch_size=batch_size, shuffle=False, epochs=1
        )

        preds = self._infer_model(
            input_fn=predict_data_feeder.input_builder, feed_fn=predict_data_feeder.get_feed_dict_fn()
        )
        if self.n_classes > 1 and axis != -1:
            preds = preds["predictions"].argmax(axis=axis)
        else:
            preds = preds["predictions"]
        return preds
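
A standalone sketch of the post-processing step above: for a multi-class model and any axis other than the default -1, the raw per-class scores under the "predictions" key are reduced with argmax. The score values below are hypothetical.

import numpy as np

# Hypothetical raw scores as _infer_model might return them for 3 classes.
preds = {"predictions": np.array([[0.1, 0.7, 0.2],
                                  [0.5, 0.3, 0.2]])}

print(preds["predictions"].argmax(axis=1))  # -> [1 0], the predicted classes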
Example #15
  def _predict(self, x, axis=-1, batch_size=None):
    if self._graph is None:
      raise NotFittedError()
    # Use the batch size for fitting if the user did not specify one.
    if batch_size is None:
      batch_size = self.batch_size

    predict_data_feeder = setup_train_data_feeder(
        x, None, n_classes=None,
        batch_size=batch_size,
        shuffle=False, epochs=1)

    preds = self._infer_model(
        input_fn=predict_data_feeder.input_builder,
        feed_fn=predict_data_feeder.get_feed_dict_fn())
    if self.n_classes > 1 and axis != -1:
      preds = preds['predictions'].argmax(axis=axis)
    else:
      preds = preds['predictions']
    return preds
Example #16
    def fit(self, X, y, monitor=None, logdir=None):
        """Builds a neural network model given provided `model_fn` and training
        data X and y.

        Note: called first time constructs the graph and initializers
        variables. Consecutives times it will continue training the same model.
        This logic follows partial_fit() interface in scikit-learn.

        To restart learning, create new estimator.

        Args:
            X: matrix or tensor of shape [n_samples, n_features...]. Can be
            iterator that returns arrays of features. The training input
            samples for fitting the model.
            y: vector or matrix [n_samples] or [n_samples, n_outputs]. Can be
            iterator that returns array of targets. The training target values
            (class labels in classification, real numbers in regression).
            monitor: Monitor object to print training progress and invoke early stopping
            logdir: the directory to save the log file that can be used for
            optional visualization.

        Returns:
            Returns self.
        """
        # Sets up data feeder.
        self._data_feeder = setup_train_data_feeder(X, y,
                                                    self.n_classes,
                                                    self.batch_size)

        if monitor is None:
            self._monitor = monitors.default_monitor(verbose=self.verbose)
        else:
            self._monitor = monitor

        if not self.continue_training or not self._initialized:
            # Sets up model and trainer.
            self._setup_training()
            self._initialized = True
        else:
            self._data_feeder.set_placeholders(self._inp, self._out)

        # Sets up summary writer for later optional visualization.
        # The _summary_writer cannot be set up in __init__ because it is not a
        # parameter of the model, so check here whether the attribute exists
        # and whether it is None (it may have been set up in a previous run).
        # It is initialized only if it was not set before and a log dir is
        # provided.
        if logdir:
            if (not hasattr(self, "_summary_writer") or
                    (hasattr(self, "_summary_writer") and self._summary_writer is None)):
                self._setup_summary_writer(logdir)
        else:
            self._summary_writer = None

        # Attach monitor to this estimator.
        self._monitor.set_estimator(self)

        # Train model for given number of steps.
        trainer.train(
            self._session, self._train,
            self._model_loss, self._global_step,
            self._data_feeder.get_feed_dict_fn(),
            steps=self.steps,
            monitor=self._monitor,
            summary_writer=self._summary_writer,
            summaries=self._summaries,
            feed_params_fn=self._data_feeder.get_feed_params)
        return self
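
A hedged usage sketch: `est` stands in for an instance of the estimator class that owns this `fit` (not shown in the excerpt). With `monitor=None` the default monitor is used, and passing `logdir` triggers the summary writer setup. The data and path are illustrative.

import numpy as np

X_train = np.random.rand(200, 10).astype(np.float32)
y_train = np.random.randint(0, 3, size=200)   # illustrative 3-class labels

# `est` is assumed to be an already constructed estimator exposing fit().
est.fit(X_train, y_train, logdir='/tmp/skflow_logs')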
Example #17
def _get_input_fn(x, y, batch_size):
    df = data_feeder.setup_train_data_feeder(x,
                                             y,
                                             n_classes=None,
                                             batch_size=batch_size)
    return df.input_builder, df.get_feed_dict_fn()
Example #18
    def fit(self, X, y, monitor=None, logdir=None):
        """Builds a neural network model given provided `model_fn` and training
        data X and y.

        Note: called first time constructs the graph and initializers
        variables. Consecutives times it will continue training the same model.
        This logic follows partial_fit() interface in scikit-learn.

        To restart learning, create new estimator.

        Args:
            X: matrix or tensor of shape [n_samples, n_features...]. Can be
            iterator that returns arrays of features. The training input
            samples for fitting the model.
            y: vector or matrix [n_samples] or [n_samples, n_outputs]. Can be
            iterator that returns array of targets. The training target values
            (class labels in classification, real numbers in regression).
            monitor: Monitor object to print training progress and invoke early stopping
            logdir: the directory to save the log file that can be used for
            optional visualization.

        Returns:
            Returns self.
        """
        # Sets up data feeder.
        self._data_feeder = setup_train_data_feeder(X, y, self.n_classes,
                                                    self.batch_size)

        if monitor is None:
            self._monitor = monitors.default_monitor(verbose=self.verbose)
        else:
            self._monitor = monitor

        if not self.continue_training or not self._initialized:
            # Sets up model and trainer.
            self._setup_training()
            self._initialized = True
        else:
            self._data_feeder.set_placeholders(self._inp, self._out)

        # Sets up summary writer for later optional visualization.
        # The _summary_writer cannot be set up in __init__ because it is not a
        # parameter of the model, so check here whether the attribute exists
        # and whether it is None (it may have been set up in a previous run).
        # It is initialized only if it was not set before and a log dir is
        # provided.
        if logdir:
            if (not hasattr(self, "_summary_writer")
                    or (hasattr(self, "_summary_writer")
                        and self._summary_writer is None)):
                self._setup_summary_writer(logdir)
        else:
            self._summary_writer = None

        # Train model for given number of steps.
        trainer.train(self._session,
                      self._train,
                      self._model_loss,
                      self._global_step,
                      self._data_feeder.get_feed_dict_fn(),
                      steps=self.steps,
                      monitor=self._monitor,
                      summary_writer=self._summary_writer,
                      summaries=self._summaries,
                      feed_params_fn=self._data_feeder.get_feed_params)
        return self
Example #19
def _get_predict_input_fn(x, y, batch_size):
  df = data_feeder.setup_train_data_feeder(
      x, y, n_classes=None, batch_size=batch_size,
      shuffle=False, epochs=1)
  return df.input_builder, df.get_feed_dict_fn()
Example #20
def _get_input_fn(x, y, batch_size):
  df = data_feeder.setup_train_data_feeder(
      x, y, n_classes=None, batch_size=batch_size)
  return df.input_builder, df.get_feed_dict_fn()