Example #1
 def __init__(self, val_X, val_y, n_classes=0, print_steps=100,
              early_stopping_rounds=None):
     super(ValidationMonitor, self).__init__(print_steps=print_steps,
                                             early_stopping_rounds=early_stopping_rounds)
     self.val_feeder = setup_train_data_feeder(val_X, val_y, n_classes, -1)
     self.print_val_loss_buffer = []
     self.all_val_loss_buffer = []
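For context, here is a minimal usage sketch of constructing such a monitor. The data, shapes, and the exact early-stopping semantics are illustrative assumptions, not taken from this snippet; only the keyword arguments match the signature above.

import numpy as np

# Hypothetical held-out split: 100 samples, 10 features, 3 classes.
val_X = np.random.rand(100, 10).astype(np.float32)
val_y = np.random.randint(0, 3, size=100)

# print_steps and early_stopping_rounds are forwarded to the base monitor;
# the values here are placeholders.
monitor = ValidationMonitor(val_X, val_y, n_classes=3,
                            print_steps=50, early_stopping_rounds=200)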
Example #2
    def fit(self, X, y, logdir=None):
        """Builds a neural network model given provided `model_fn` and training
        data X and y.

        Note: called first time constructs the graph and initializers
        variables. Consecutives times it will continue training the same model.
        This logic follows partial_fit() interface in scikit-learn.

        To restart learning, create new estimator.

        Args:
            X: matrix or tensor of shape [n_samples, n_features...]. Can be
            an iterator that returns arrays of features. The training input
            samples for fitting the model.
            y: vector or matrix [n_samples] or [n_samples, n_outputs]. Can be
            an iterator that returns arrays of targets. The training target
            values (class labels in classification, real numbers in
            regression).
            logdir: the directory to save the log file that can be used for
            optional visualization.

        Returns:
            Returns self.
        """
        # Sets up data feeder.
        self._data_feeder = setup_train_data_feeder(X, y,
                                                    self.n_classes,
                                                    self.batch_size)
        if not self.continue_training or not self._initialized:
            # Sets up model and trainer.
            self._setup_training()
            # Initialize model parameters.
            self._trainer.initialize(self._session)
            self._initialized = True

        # Sets up summary writer for later optional visualization.
        # _summary_writer cannot be set up in __init__ because it is not a
        # parameter of the model, so check here whether the attribute exists
        # and whether it is None (it may have been set up in a previous run).
        # It is initialized only if it was not set before and a log dir is
        # provided.
        if logdir:
            if getattr(self, "_summary_writer", None) is None:
                self._setup_summary_writer(logdir)
        else:
            self._summary_writer = None

        # Train model for given number of steps.
        self._trainer.train(self._session,
                            self._data_feeder.get_feed_dict_fn(
                                self._inp, self._out),
                            self.steps,
                            self._summary_writer,
                            self._summaries,
                            verbose=self.verbose,
                            early_stopping_rounds=self._early_stopping_rounds,
                            feed_params_fn=self._data_feeder.get_feed_params)
        return self
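To make the call contract in the docstring concrete, here is a rough usage sketch. `MyEstimator`, its constructor arguments, and the log directory are placeholders standing in for a concrete subclass that supplies model_fn, n_classes, batch_size, steps, and continue_training; they are not the library's actual API.

import numpy as np

X_train = np.random.rand(1000, 10).astype(np.float32)
y_train = np.random.randint(0, 3, size=1000)

# Hypothetical concrete estimator subclass of the base class shown above.
estimator = MyEstimator(n_classes=3, batch_size=32, steps=200)

# First call: builds the graph, initializes variables, trains for `steps`
# steps, and writes summaries under logdir for later visualization.
estimator.fit(X_train, y_train, logdir="/tmp/fit_logs")

# Later call: if continue_training is enabled, this continues training the
# same model (partial_fit-style) instead of rebuilding the graph.
estimator.fit(X_train, y_train)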
Example #3
 def __init__(self,
              val_X,
              val_y,
              n_classes=0,
              print_steps=100,
              early_stopping_rounds=None):
     super(ValidationMonitor,
           self).__init__(print_steps=print_steps,
                          early_stopping_rounds=early_stopping_rounds)
     self.val_feeder = setup_train_data_feeder(val_X, val_y, n_classes, -1)
     self.print_val_loss_buffer = []
     self.all_val_loss_buffer = []