Example #1
    def predict(self, x, batch_size=128, **kwargs):
        """
        Perform prediction for a batch of inputs. Note that for classification
        problems, it returns the resulting probabilities.

        :param x: Samples with shape as expected by the model.
        :type x: `np.ndarray`
        :param batch_size: Size of batches.
        :type batch_size: `int`
        :param kwargs: Dictionary of keras-specific arguments.
        :type kwargs: `dict`
        :return: Array of predictions
        :rtype: `np.ndarray`
        """
        try:
            if isinstance(x, np.ndarray):
                preds = self.model.predict(x, batch_size=batch_size, **kwargs)
            else:
                # Fall back to the configured steps_per_epoch unless the
                # caller supplied an explicit `steps` argument, and forward
                # the resolved value to predict_generator.
                steps = kwargs.pop('steps', self.steps_per_epoch)

                if type(x) is not NumpyArrayIterator and not steps:
                    raise LocalTrainingException(
                        "Variable steps cannot be None for a generator "
                        "not of type keras.utils.Sequence")
                preds = self.model.predict_generator(x, steps=steps, **kwargs)
        except Exception as ex:
            logger.exception(str(ex))
            raise LocalTrainingException("Error occurred during prediction.")
        return preds
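
A minimal usage sketch for predict, assuming fl_model is an instance of the wrapper class above (the variable names and dummy data are illustrative only):

import numpy as np
from keras.preprocessing.image import ImageDataGenerator

# Dummy image batch; replace with real inputs shaped for the model.
x_test = np.random.rand(256, 28, 28, 1).astype('float32')

# ndarray path: dispatches to model.predict.
probs = fl_model.predict(x_test, batch_size=64)

# Generator path: ImageDataGenerator.flow returns a NumpyArrayIterator,
# which is a keras.utils.Sequence, so no explicit steps argument is needed.
flow = ImageDataGenerator(rescale=1. / 255).flow(x_test,
                                                 batch_size=64,
                                                 shuffle=False)
probs_gen = fl_model.predict(flow)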
Example #2
    def fit_generator(self,
                      training_generator,
                      batch_size,
                      epochs,
                      steps_per_epoch=None):
        """
        Fits current model using model.fit_generator with provided training data generator.

        :param training_generator: Training data generator of type \
        `keras.utils.Sequence` or \
        `keras.preprocessing.image.ImageDataGenerator`.
        :type training_generator: `ImageDataGenerator` or `keras.utils.Sequence`
        :param batch_size: Number of samples per gradient update (unused \
        here; the generator controls batching).
        :type batch_size: `int`
        :param epochs: Number of epochs to train the model.
        :type epochs: `int`
        :param steps_per_epoch: Total number of steps (batches of samples) \
                to yield from the generator before declaring one epoch \
                finished. Optional for `Sequence`-based data generators.
        :type steps_per_epoch: `int`
        :return: None
        """

        if type(training_generator) is NumpyArrayIterator \
                and not steps_per_epoch:
            raise LocalTrainingException(
                "Variable steps_per_epoch cannot be None for generators "
                "not of type keras.utils.Sequence!")

        with self.graph.as_default():
            set_session(self.sess)
            self.model.fit_generator(training_generator,
                                     steps_per_epoch=steps_per_epoch,
                                     epochs=epochs)
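
A usage sketch under the same assumptions (fl_model wraps a compiled Keras classifier; the arrays are placeholders). Since ImageDataGenerator.flow yields a NumpyArrayIterator, the guard above demands an explicit steps_per_epoch:

import numpy as np
from keras.preprocessing.image import ImageDataGenerator
from keras.utils import to_categorical

x_train = np.random.rand(512, 28, 28, 1).astype('float32')
y_train = to_categorical(np.random.randint(0, 10, size=(512,)), 10)

train_it = ImageDataGenerator(rotation_range=10).flow(x_train, y_train,
                                                      batch_size=32)

# batch_size is informational here; the iterator already batches the data.
fl_model.fit_generator(train_it,
                       batch_size=32,
                       epochs=2,
                       steps_per_epoch=len(x_train) // 32)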
Example #3
    def evaluate_generator_model(self, test_generator, batch_size=32, **kwargs):
        """
        Evaluates the model based on the provided data generator.

        :param test_generator: Testing data generator of type \
        `keras.utils.Sequence` or \
        `keras.preprocessing.image.ImageDataGenerator`.
        :type test_generator: `ImageDataGenerator` or `keras.utils.Sequence`
        :param batch_size: Number of samples per evaluation batch (unused \
        here; the generator controls batching).
        :type batch_size: `int`

        :return: metrics
        :rtype: `dict`
        """
        steps = kwargs.get('steps_per_epoch', self.steps_per_epoch)

        if type(test_generator) is not NumpyArrayIterator and not steps:
            raise LocalTrainingException(
                "Variable steps_per_epoch cannot be None for a generator "
                "not of type keras.utils.Sequence")
        with self.graph.as_default():
            metrics = self.model.evaluate_generator(
                test_generator, steps=steps)
            names = self.model.metrics_names
            dict_metrics = {}

            if isinstance(metrics, list):
                for metric, name in zip(metrics, names):
                    dict_metrics[name] = metric
            else:
                dict_metrics[names[0]] = metrics

        return dict_metrics
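
A sketch of calling this evaluation entry point, assuming x_test and y_test are test arrays shaped for the model and fl_model is as in the earlier sketches; the returned dict is keyed by the model's metrics_names (typically 'loss' plus any compiled metrics):

from keras.preprocessing.image import ImageDataGenerator

test_it = ImageDataGenerator(rescale=1. / 255).flow(x_test, y_test,
                                                    batch_size=32,
                                                    shuffle=False)

metrics = fl_model.evaluate_generator_model(test_it,
                                            steps_per_epoch=len(x_test) // 32)
print(metrics)  # e.g. {'loss': ..., 'acc': ...}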
Example #4
    def evaluate_generator_model(self, test_generator, **kwargs):
        """
        Evaluates the model based on the provided data generator.

        :param test_generator: Testing datagenerator of type \
        `keras.utils.Sequence`, or \
        `keras.preprocessing.image.ImageDataGenerator`.
        :type test_generator: `ImageDataGenerator` or `keras.utils.Sequence`
        :return: metrics
        :rtype: `dict`
        """

        steps = kwargs.get('steps', self.steps_per_epoch)

        if type(test_generator) is not NumpyArrayIterator and not steps:
            raise LocalTrainingException(
                "Variable steps cannot be None for generator "
                "not of type keras.utils.Sequence")

        metrics = self.model.evaluate_generator(test_generator, steps=steps)
        names = self.model.metrics_names
        dict_metrics = {}

        if isinstance(metrics, list):
            for metric, name in zip(metrics, names):
                dict_metrics[name] = metric
        else:
            dict_metrics[names[0]] = metrics

        return dict_metrics
Example #5
    def fit_generator(self, training_generator, epochs, steps_per_epoch=None):
        """
        Fits current model using model.fit_generator with provided
        training data generator.

        :param training_generator: Training data generator of type \
        `keras.utils.Sequence` or \
        `keras.preprocessing.image.ImageDataGenerator`.
        :type training_generator: `ImageDataGenerator` or \
        `keras.utils.Sequence`
        :param epochs: Number of epochs to train the model.
        :type epochs: `int`
        :param steps_per_epoch: Total number of steps (batches of samples) \
                to yield from the generator before declaring one epoch \
                finished. Optional for `Sequence`-based data generators.
        :type steps_per_epoch: `int`
        :return: None
        """

        if type(training_generator) is NumpyArrayIterator \
                and not steps_per_epoch:
            raise LocalTrainingException(
                "Variable steps_per_epoch cannot be None for generators "
                "not of type keras.utils.Sequence!")

        self.model.fit_generator(training_generator,
                                 steps_per_epoch=steps_per_epoch,
                                 epochs=epochs)
Example #6
    def update_model(self, model_update):
        """
        Update keras model with provided model_update, where model_update
        should be generated according to `KerasFLModel.get_model_update()`.

        :param model_update: `ModelUpdate` object that contains the weight \
        that will be used to update the model.
        :type model_update: `ModelUpdate`
        :return: None
        """
        if isinstance(model_update, ModelUpdate):
            w = model_update.get("weights")
            self.model.set_weights(w)
        else:
            raise LocalTrainingException('Provided model_update should be of '
                                         'type ModelUpdate. '
                                         'Instead it is: ' +
                                         str(type(model_update)))
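
A round-trip sketch for the update path. It assumes the ModelUpdate container exposes its fields via get, as model_update.get("weights") above implies, and that the wrapper provides the get_model_update() counterpart mentioned in the docstring:

# Serialize the local model's weights into a ModelUpdate.
local_update = fl_model.get_model_update()

# ... an aggregator would normally merge updates from all parties here ...

# Apply the (possibly aggregated) update back onto the local model.
fl_model.update_model(local_update)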
Example #7
    def fit_model(self, train_data, fit_params=None):
        """
        Fits current model with provided training data.

        :param train_data: Training data, either a tuple given in the form \
        (x_train, y_train) or a data generator of type \
        `keras.utils.Sequence` or \
        `keras.preprocessing.image.ImageDataGenerator`.
        :type train_data: `tuple` of `np.ndarray`, `keras.utils.Sequence`, \
        or `ImageDataGenerator`
        :param fit_params: (optional) Dictionary with hyperparameters \
        that will be used to call Keras fit function.\
        Hyperparameter parameters should match keras expected values \
        e.g., `epochs`, which specifies the number of epochs to be run. \
        If no `epochs` or `batch_size` are provided, a default value \
        will be used (1 and 128, respectively).
        :type fit_params: `dict`
        :return: None
        """
        # Initialized with default values
        batch_size = self.batch_size
        epochs = self.epochs
        steps_per_epoch = self.steps_per_epoch
        # Extract hyperparameters from fit_params, falling back to defaults.
        if fit_params and ('hyperparams' in fit_params):
            hyperparams = fit_params['hyperparams']
            try:
                training_hp = hyperparams['local']['training']

                if 'batch_size' in training_hp:
                    batch_size = training_hp['batch_size']
                else:
                    # In this case, use default values.
                    logger.info('Using default hyperparameter '
                                'batch_size: ' + str(self.batch_size))

                if 'epochs' in training_hp:
                    epochs = training_hp['epochs']
                else:
                    # In this case, use default values.
                    logger.info('Using default hyperparameter '
                                'epochs: ' + str(self.epochs))

                if 'steps_per_epoch' in training_hp:
                    steps_per_epoch = training_hp.get('steps_per_epoch')

            except Exception as ex:
                logger.exception(str(ex))
                logger.warning('Hyperparams badly formed.')
                # In this case, use default values.
                logger.info('Using default hyperparameters: '
                            'epochs: ' + str(self.epochs) +
                            ', batch_size: ' + str(self.batch_size))

        try:

            if isinstance(train_data, tuple) and \
                    isinstance(train_data[0], np.ndarray):
                self.fit(train_data, batch_size=batch_size, epochs=epochs)

            else:
                self.fit_generator(train_data,
                                   batch_size=batch_size,
                                   epochs=epochs,
                                   steps_per_epoch=steps_per_epoch)

        except Exception as e:
            logger.exception(str(e))
            if epochs is None:
                logger.error('epochs must be provided')

            raise LocalTrainingException(
                'Error occurred while performing model.fit')
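
The nested fit_params layout this method expects can be read off the lookups above; a sketch of a matching dictionary (the values are illustrative only):

fit_params = {
    'hyperparams': {
        'local': {
            'training': {
                'batch_size': 128,
                'epochs': 3,
                'steps_per_epoch': 100,  # only required for plain generators
            }
        }
    }
}

# A (x_train, y_train) tuple goes through self.fit; a generator would be
# routed to self.fit_generator instead.
fl_model.fit_model((x_train, y_train), fit_params=fit_params)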
Example #8
    def fit_model(self, train_data, fit_params=None):
        """
        Fits current model with provided training data.

        :param train_data: Training data, either a tuple given in the form \
        (x_train, y_train) or a data generator of type \
        `keras.utils.Sequence` or \
        `keras.preprocessing.image.ImageDataGenerator`.
        :type train_data: `tuple` of `np.ndarray`, `keras.utils.Sequence`, \
        or `ImageDataGenerator`
        :param fit_params: (optional) Dictionary with hyperparameters \
        that will be used to call Keras fit function.\
        Hyperparameter parameters should match keras expected values \
        e.g., `epochs`, which specifies the number of epochs to be run. \
        If no `epochs` or `batch_size` are provided, a default value \
        will be used (1 and 128, respectively).
        :type fit_params: `dict`
        :return: None
        """

        hyperparams = (fit_params.get('hyperparams') or {}) \
            if fit_params else {}
        local_hp = hyperparams.get('local') or {}
        training_hp = local_hp.get('training') or {}
        dp_hp = local_hp.get('privacy') or {}
        op_hp = local_hp.get('optimizer') or {}

        # Initialized with default values if not in training_hp
        batch_size = training_hp.get('batch_size', self.batch_size)
        epochs = training_hp.get('epochs', self.epochs)
        steps_per_epoch = training_hp.get('steps_per_epoch',
                                          self.steps_per_epoch)
        budget = dp_hp.get('budget', None)
        delta = dp_hp.get('delta', 0.005)
        lr = op_hp.get('lr', 0.01)

        logger.info('Training hps for this round => '
                    'batch_size: {}, epochs {}, steps_per_epoch {}'.format(
                        batch_size, epochs, steps_per_epoch))

        if epochs is None:
            logger.error('epochs must be provided')
            raise ModelException("Invalid hyperparams, epochs can't be None")

        try:

            if isinstance(train_data, tuple) and \
                    isinstance(train_data[0], np.ndarray):
                self.fit(train_data,
                         batch_size=batch_size,
                         epochs=epochs,
                         budget=budget,
                         delta=delta,
                         lr=lr)
            else:
                # batch_size won't be used for data generator
                self.fit_generator(train_data,
                                   epochs=epochs,
                                   steps_per_epoch=steps_per_epoch)

        except Exception as e:
            logger.exception(str(e))
            raise LocalTrainingException(
                'Error occurred while performing model.fit')
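
This variant additionally reads differential-privacy and optimizer settings from the same nested structure; a sketch of the extended dictionary, with key names taken from the lookups above and illustrative values:

fit_params = {
    'hyperparams': {
        'local': {
            'training': {'batch_size': 128, 'epochs': 3},
            'privacy': {'budget': 1.0, 'delta': 0.005},
            'optimizer': {'lr': 0.01},
        }
    }
}

fl_model.fit_model((x_train, y_train), fit_params=fit_params)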