Example #1
    def __init__(self, val_X, val_y, n_classes=0, print_steps=100,
                 early_stopping_rounds=None):
        super(ValidationMonitor, self).__init__(print_steps=print_steps,
                                                early_stopping_rounds=early_stopping_rounds)
        # Batch size of -1: the feeder yields the whole validation set at once.
        self.val_feeder = setup_train_data_feeder(val_X, val_y, n_classes, -1)
        # Buffers for validation losses: one per print interval, one for the whole run.
        self.print_val_loss_buffer = []
        self.all_val_loss_buffer = []
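A minimal sketch of how such a validation monitor is typically wired into training, assuming the old standalone `skflow` package (later `tf.contrib.learn`); the import path, the `TensorFlowLinearClassifier` estimator, and the toy data below are illustrative assumptions, not a verified recipe for any specific release:

import numpy as np
import skflow  # assumed import path; this API later moved to tf.contrib.learn

# Hold out a validation split alongside the training data.
X_train, y_train = np.random.rand(900, 10), np.random.randint(0, 2, 900)
X_val, y_val = np.random.rand(100, 10), np.random.randint(0, 2, 100)

# The monitor evaluates validation loss every `print_steps` steps and stops
# training early if the loss stops improving for `early_stopping_rounds` rounds.
val_monitor = skflow.monitors.ValidationMonitor(X_val, y_val, n_classes=2,
                                                print_steps=100,
                                                early_stopping_rounds=5)

classifier = skflow.TensorFlowLinearClassifier(n_classes=2, steps=2000)
classifier.fit(X_train, y_train, monitor=val_monitor)  # see the fit() example below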
Example #2
    def fit(self, X, y, monitor=None, logdir=None):
        """Builds a neural network model given provided `model_fn` and training
        data X and y.

        Note: called first time constructs the graph and initializers
        variables. Consecutives times it will continue training the same model.
        This logic follows partial_fit() interface in scikit-learn.

        To restart learning, create new estimator.

        Args:
            X: matrix or tensor of shape [n_samples, n_features...]. Can be
            iterator that returns arrays of features. The training input
            samples for fitting the model.
            y: vector or matrix [n_samples] or [n_samples, n_outputs]. Can be
            iterator that returns array of targets. The training target values
            (class labels in classification, real numbers in regression).
            monitor: Monitor object to print training progress and invoke early stopping
            logdir: the directory to save the log file that can be used for
            optional visualization.

        Returns:
            Returns self.
        """
        # Sets up data feeder.
        self._data_feeder = setup_train_data_feeder(X, y, self.n_classes,
                                                    self.batch_size)

        if monitor is None:
            self._monitor = monitors.default_monitor(verbose=self.verbose)
        else:
            self._monitor = monitor

        if not self.continue_training or not self._initialized:
            # Sets up model and trainer.
            self._setup_training()
            self._initialized = True
        else:
            self._data_feeder.set_placeholders(self._inp, self._out)

        # Sets up summary writer for later optional visualization.
        # _summary_writer cannot be set up in __init__ because it is not a
        # parameter of the model, so check here whether the attribute exists
        # and whether it is None (it may have been set up in a previous run).
        # It is initialized only if it was not set up before and a log dir is
        # provided.
        if logdir:
            if getattr(self, "_summary_writer", None) is None:
                self._setup_summary_writer(logdir)
        else:
            self._summary_writer = None

        # Train model for given number of steps.
        trainer.train(self._session,
                      self._train,
                      self._model_loss,
                      self._global_step,
                      self._data_feeder.get_feed_dict_fn(),
                      steps=self.steps,
                      monitor=self._monitor,
                      summary_writer=self._summary_writer,
                      summaries=self._summaries,
                      feed_params_fn=self._data_feeder.get_feed_params)
        return self
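For orientation, a hedged end-to-end sketch of the fit() contract described in the docstring above; the `skflow` import path, `TensorFlowEstimator`, and the logistic-regression `model_fn` helper are assumptions based on the API style of these examples, not a definitive usage:

import numpy as np
import skflow  # assumed import path; this API later moved to tf.contrib.learn

def my_model_fn(X, y):
    # A model_fn is expected to return (predictions, loss); here a simple
    # logistic regression helper stands in for a real network.
    return skflow.models.logistic_regression(X, y)

# Toy data matching the docstring shapes: X is [n_samples, n_features], y is [n_samples].
X = np.random.rand(1000, 20).astype(np.float32)
y = np.random.randint(0, 3, size=1000)

estimator = skflow.TensorFlowEstimator(model_fn=my_model_fn, n_classes=3,
                                       batch_size=32, steps=2000)

# The first fit() call builds the graph and initializes variables; calling
# fit() again continues training the same model (partial_fit semantics).
estimator.fit(X, y, logdir="/tmp/skflow_logs")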