Example 1
 def predict(self,
             x,
             predict_fn,
             batch_size=None,
             steps=None):
     if batch_size is None and steps is None:
         batch_size = 32
     if x is None and steps is None:
         raise ValueError('If predicting from data tensors, '
                          'you should specify the `steps` '
                          'argument.')
     x, _ = self._standardize_data(x)
     inputs = x
     if self.uses_learning_phase:  # [0.] flag for inference
         inputs += [0.]
     num_samples = utils.check_num_samples(
         inputs, batch_size=batch_size,
         steps=steps)
     steps = steps or ((num_samples + batch_size - 1) // batch_size)
     self._prepare_predict_hooks(steps_per_epoch=steps,
                                 predict_fn=predict_fn)
     self._make_predict_function()
     msg = "==>Start predicting"
     if num_samples:
         msg += " on %d samples" % num_samples
     logging.info(msg)
     sparse_indices = self._sparse_data_indices(inputs)
     self.function_loop(inputs,
                        self.predict_function,
                        sparse_indices=sparse_indices,
                        batch_size=batch_size,
                        steps=steps,
                        num_samples=num_samples)
     self.predict_function.end()
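
Note on the step arithmetic: when `steps` is not given, the step count is an integer ceiling division of `num_samples` by `batch_size` (the `(num_samples + batch_size - 1) // batch_size` expression above, reused in the later examples). A minimal standalone sketch of that computation; the helper name `ceil_div` is ours, not part of the source:

 def ceil_div(num_samples, batch_size):
     # Same as math.ceil(num_samples / batch_size) for positive integers,
     # mirroring the expression used in predict/evaluate/fit.
     return (num_samples + batch_size - 1) // batch_size

 print(ceil_div(100, 32))  # 4 -> three full batches of 32 plus one of 4
 print(ceil_div(64, 32))   # 2
 print(ceil_div(1, 32))    # 1
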
Example 2
 def evaluate(self,
              x=None,
              y=None,
              batch_size=None,
              validation_steps=None):
     if batch_size is None and validation_steps is None:
         batch_size = 32
     if x is None and y is None and validation_steps is None:
         raise ValueError("If evaluating from data tensors, "
                          "argument `validation_steps` must be set")
     x, y = self._standardize_data(x, y)
     inputs = x + y
     if self.uses_learning_phase:  # [0.] flag for evaluation
         inputs += [0.]
     num_samples = utils.check_num_samples(
         inputs, batch_size=batch_size,
         steps=validation_steps)
     steps = validation_steps or (
             (num_samples + batch_size - 1) // batch_size)
     self._prepare_val_hooks(epochs=1,
                             steps_per_epoch=steps,
                             initial_epoch=0)
     self._make_eval_function()
     msg = "==>Start evaluating"
     if num_samples:
         msg += " on %d samples" % num_samples
     logging.info(msg)
     sparse_indices = self._sparse_data_indices(inputs)
     self.function_loop(inputs,
                        self.eval_function,
                        sparse_indices=sparse_indices,
                        batch_size=batch_size,
                        steps=validation_steps,
                        num_samples=num_samples)
     self.eval_function.end()
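
A hedged usage sketch for the two methods above. `model` is assumed to be an already constructed instance of the (unshown) class that defines them, `predict_fn` is whatever callable its `_prepare_predict_hooks` expects, and the array shapes are illustrative only; none of these names come from the source:

 import numpy as np

 x_test = np.random.rand(128, 20).astype('float32')            # assumed input shape
 y_test = np.random.randint(0, 2, (128, 1)).astype('float32')  # assumed target shape

 # With neither batch_size nor steps given, batch_size falls back to 32.
 model.predict(x_test, predict_fn=predict_fn)

 # Evaluating from in-memory arrays; when evaluating from data tensors
 # instead, validation_steps must be set or the ValueError above is raised.
 model.evaluate(x=x_test, y=y_test, batch_size=64)
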
Example 3
 def fit(self,
         x=None,
         y=None,
         val_x=None,
         val_y=None,
         batch_size=None,
         shuffle=True,
         epochs=1,
         steps_per_epoch=None,
         validation_steps=None):
     if batch_size is None and steps_per_epoch is None:
         batch_size = 32
     if x is None and y is None and steps_per_epoch is None:
         raise ValueError("When fitting from data tensors,"
                          " `steps_per_epoch` must be specified")
     # prepare global_step
     step = self._load_global_step()
     if training_util.get_global_step() is None:
         fops.add_to_collection(
             name=fops.GraphKeys.GLOBAL_STEP,
             value=variables.Variable(
                 step, name='global_step', trainable=False))
     # build train function
     x, y = self._standardize_data(x, y)
     data = x + y
     if self.uses_learning_phase:  # [1.] flag for training
         data += [1.]
     num_train_samples = utils.check_num_samples(
         data, batch_size=batch_size,
         steps=steps_per_epoch)
     train_steps = steps_per_epoch or (
             (num_train_samples + batch_size - 1) // batch_size)
     initial_epoch = step // train_steps
     if epochs is not None:
         if epochs <= initial_epoch:
             logging.info("=>Skipping training: requested number of epochs already reached")
             exit(0)
     self._prepare_train_hooks(epochs=epochs,
                               steps_per_epoch=train_steps,
                               initial_epoch=initial_epoch)
     self._make_train_function()
     # build val function
     validation = False
     num_val_samples = None
     if val_x is not None and val_y is not None:
         validation = True
         val_x, val_y = self._standardize_data(val_x, val_y)
         val_data = val_x + val_y
     elif validation_steps:
         validation = True
         val_data = []
     else:
         val_data = []
     if validation:
         if self.uses_learning_phase:  # [0.] flag for evaluation
             val_data += [0.]
         num_val_samples = utils.check_num_samples(
             val_data, batch_size=batch_size,
             steps=validation_steps)
         val_steps = validation_steps or (
                 (num_val_samples + batch_size - 1) // batch_size)
         self._prepare_val_hooks(epochs=epochs,
                                 steps_per_epoch=val_steps,
                                 initial_epoch=initial_epoch)
         self._make_eval_function()
     self.fit_loop(data=data,
                   val_data=val_data,
                   batch_size=batch_size,
                   shuffle=shuffle,
                   epochs=epochs,
                   initial_epoch=initial_epoch,
                   steps_per_epoch=steps_per_epoch,
                   validation_steps=validation_steps,
                   num_train_samples=num_train_samples,
                   num_val_samples=num_val_samples)
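
A hedged sketch of driving `fit`, together with the resume arithmetic it performs from the restored global step; `model` and the array names are assumptions, only the numbers mirror the code above:

 # Resume logic: with 1000 training samples and batch_size=32,
 # train_steps = (1000 + 32 - 1) // 32 = 32 steps per epoch.
 # A restored global step of 96 gives initial_epoch = 96 // 32 = 3,
 # so training resumes at epoch 3 and is skipped entirely once
 # epochs <= 3.
 model.fit(x=x_train, y=y_train,
           val_x=x_val, val_y=y_val,
           batch_size=32,
           epochs=10)
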