Example #1
    def search(self,
               epochs=None,
               callbacks=None,
               fit_on_val_data=False,
               **fit_kwargs):
        """Search for the best HyperParameters.

        If there is no early-stopping callback in the callbacks list, an
        early-stopping callback is injected to accelerate the search process.
        At the end of the search, the best model is fully trained with the
        specified number of epochs.

        # Arguments
            epochs: Int. The number of epochs to train each model during the
                search. If unspecified, it defaults to 1000 with an injected
                early-stopping callback to end training adaptively.
            callbacks: A list of callback functions. Defaults to None.
            fit_on_val_data: Boolean. Whether to use both the training set and
                the validation set for the final fit of the best model.
        """
        if self._finished:
            return

        if callbacks is None:
            callbacks = []

        # Insert early-stopping for an adaptive number of epochs.
        if epochs is None:
            epochs = 1000
            if not utils.contain_instance(callbacks, tf_callbacks.EarlyStopping):
                callbacks.append(tf_callbacks.EarlyStopping(patience=10))

        # Insert early-stopping for acceleration.
        acceleration = False
        new_callbacks = self._deepcopy_callbacks(callbacks)
        if not utils.contain_instance(callbacks, tf_callbacks.EarlyStopping):
            acceleration = True
            new_callbacks.append(tf_callbacks.EarlyStopping(patience=10))

        super().search(epochs=epochs, callbacks=new_callbacks, **fit_kwargs)

        # Fully train the best model with original callbacks.
        if acceleration or fit_on_val_data:
            copied_fit_kwargs = copy.copy(fit_kwargs)
            if fit_on_val_data:
                # Concatenate training and validation data.
                copied_fit_kwargs['x'] = copied_fit_kwargs['x'].concatenate(
                    fit_kwargs['validation_data'])
                copied_fit_kwargs.pop('validation_data')
                # Remove early-stopping since there is no validation data.
                if utils.contain_instance(callbacks, tf_callbacks.EarlyStopping):
                    copied_fit_kwargs['callbacks'] = [
                        copy.deepcopy(callback)
                        for callback in callbacks
                        if not isinstance(callback, tf_callbacks.EarlyStopping)]
                    # Use the number of epochs of the best trial.
                    copied_fit_kwargs['epochs'] = self._get_best_trial_epochs()
            model = self.final_fit(**copied_fit_kwargs)
        else:
            model = self.get_best_models()[0]

        model.save_weights(self.best_model_path)
        self._finished = True
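
The method relies on two helpers that are not shown above. A minimal sketch of what they might look like, assuming `utils.contain_instance` is a type-membership check over a list and `_deepcopy_callbacks` is a method of the tuner class (the names come from the code above; the bodies are illustrative guesses):

import copy


def contain_instance(instances, instance_type):
    # utils.contain_instance: True if any element of `instances` is an
    # instance of `instance_type`.
    return any(isinstance(instance, instance_type) for instance in instances)


def _deepcopy_callbacks(self, callbacks):
    # Deep-copy the callbacks so that appending the injected EarlyStopping
    # never mutates the list the caller passed in.
    return copy.deepcopy(callbacks)
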
Example #2
    def search(self,
               epochs=None,
               callbacks=None,
               validation_split=0,
               verbose=1,
               **fit_kwargs):
        """Search for the best HyperParameters.

        If there is no early-stopping callback in the callbacks list, an
        early-stopping callback is injected to accelerate the search process.
        At the end of the search, the best model is fully trained with the
        specified number of epochs.

        # Arguments
            epochs: Int. The number of epochs to train each model during the
                search. If unspecified, it defaults to 1000 with an injected
                early-stopping callback to end training adaptively.
            callbacks: A list of callback functions. Defaults to None.
            validation_split: Float between 0 and 1. The fraction of the
                training data to be used as validation data. Defaults to 0.
            verbose: Int. Verbosity mode for searching and fitting. Defaults
                to 1.
        """
        if self._finished:
            return

        if callbacks is None:
            callbacks = []

        self.hypermodel.hypermodel.set_fit_args(validation_split,
                                                epochs=epochs)

        # Insert early-stopping for an adaptive number of epochs.
        epochs_provided = True
        if epochs is None:
            epochs_provided = False
            epochs = 1000
            if not utils.contain_instance(callbacks,
                                          tf_callbacks.EarlyStopping):
                callbacks.append(
                    tf_callbacks.EarlyStopping(patience=10, min_delta=1e-4))

        # Insert early-stopping for acceleration.
        early_stopping_inserted = False
        new_callbacks = self._deepcopy_callbacks(callbacks)
        if not utils.contain_instance(callbacks, tf_callbacks.EarlyStopping):
            early_stopping_inserted = True
            new_callbacks.append(
                tf_callbacks.EarlyStopping(patience=10, min_delta=1e-4))

        # Populate initial search space.
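        # Building the hypermodel once with the default hyperparameter values
        # registers every hyperparameter with the oracle before the search.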
        hp = self.oracle.get_space()
        self._prepare_model_build(hp, **fit_kwargs)
        self.hypermodel.build(hp)
        self.oracle.update_space(hp)

        super().search(epochs=epochs,
                       callbacks=new_callbacks,
                       verbose=verbose,
                       **fit_kwargs)

        # Retrain the best model, either to also train it on the validation
        # data or to run it for enough epochs without the injected
        # early-stopping callback.
        if validation_split > 0 or early_stopping_inserted:
            copied_fit_kwargs = copy.copy(fit_kwargs)

            # Remove early-stopping: it was either injected only to speed up
            # the search, or it cannot monitor a validation metric once the
            # validation data is merged into the training data.
            copied_fit_kwargs["callbacks"] = self._remove_early_stopping(
                callbacks)

            # Decide the number of epochs.
            copied_fit_kwargs["epochs"] = epochs
            if not epochs_provided:
                copied_fit_kwargs["epochs"] = self._get_best_trial_epochs()

            # Concatenate training and validation data.
            if validation_split > 0:
                copied_fit_kwargs["x"] = copied_fit_kwargs["x"].concatenate(
                    fit_kwargs["validation_data"])
                copied_fit_kwargs.pop("validation_data")

            self.hypermodel.hypermodel.set_fit_args(
                0, epochs=copied_fit_kwargs["epochs"])
            pipeline, model = self.final_fit(**copied_fit_kwargs)
        else:
            model = self.get_best_models()[0]
            pipeline = pipeline_module.load_pipeline(
                self._pipeline_path(
                    self.oracle.get_best_trials(1)[0].trial_id))

        model.save(self.best_model_path)
        pipeline.save(self.best_pipeline_path)
        self._finished = True
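
Example #2 also calls `_remove_early_stopping` and `_get_best_trial_epochs`, which are not shown. A minimal sketch, assuming keras-tuner style `Trial` objects whose `best_step` attribute records the best epoch of the trial (the method names come from the code above; the bodies are illustrative):

import copy

from tensorflow.keras import callbacks as tf_callbacks


def _remove_early_stopping(self, callbacks):
    # Keep deep copies of everything except EarlyStopping, which cannot
    # monitor a validation metric once the validation data is merged into
    # the training data.
    return [
        copy.deepcopy(callback)
        for callback in callbacks
        if not isinstance(callback, tf_callbacks.EarlyStopping)
    ]


def _get_best_trial_epochs(self):
    # Train the final model for as many epochs as the best trial actually
    # improved for (steps count from 0, hence the +1).
    best_trial = self.oracle.get_best_trials(1)[0]
    return best_trial.best_step + 1
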
Example #3
    def search(self,
               epochs=None,
               callbacks=None,
               fit_on_val_data=False,
               **fit_kwargs):
        """Search for the best HyperParameters.

        If there is no early-stopping callback in the callbacks list, an
        early-stopping callback is injected to accelerate the search process.
        At the end of the search, the best model is fully trained with the
        specified number of epochs.

        # Arguments
            epochs: Int. The number of epochs to train each model during the
                search. If unspecified, it defaults to 1000 with an injected
                early-stopping callback to end training adaptively.
            callbacks: A list of callback functions. Defaults to None.
            fit_on_val_data: Boolean. Whether to use both the training set and
                the validation set for the final fit of the best model.
        """
        if self._finished:
            return

        if callbacks is None:
            callbacks = []

        # Insert early-stopping for an adaptive number of epochs.
        epochs_provided = True
        if epochs is None:
            epochs_provided = False
            epochs = 1000
            if not utils.contain_instance(callbacks, tf_callbacks.EarlyStopping):
                callbacks.append(tf_callbacks.EarlyStopping(patience=10))

        # Insert early-stopping for acceleration.
        early_stopping_inserted = False
        new_callbacks = self._deepcopy_callbacks(callbacks)
        if not utils.contain_instance(callbacks, tf_callbacks.EarlyStopping):
            early_stopping_inserted = True
            new_callbacks.append(tf_callbacks.EarlyStopping(patience=10))

        # Populate initial search space.
        hp = self.oracle.get_space()
        self.hypermodel.build(hp)
        self.oracle.update_space(hp)

        super().search(epochs=epochs, callbacks=new_callbacks, **fit_kwargs)

        # Retrain the best model, either to also train it on the validation
        # data or to run it for enough epochs without the injected
        # early-stopping callback.
        if fit_on_val_data or early_stopping_inserted:
            copied_fit_kwargs = copy.copy(fit_kwargs)

            # Remove early-stopping: it was either injected only to speed up
            # the search, or it cannot monitor a validation metric once the
            # validation data is merged into the training data.
            copied_fit_kwargs['callbacks'] = self._remove_early_stopping(callbacks)

            # Decide the number of epochs.
            copied_fit_kwargs['epochs'] = epochs
            if not epochs_provided:
                copied_fit_kwargs['epochs'] = self._get_best_trial_epochs()

            # Concatenate training and validation data.
            if fit_on_val_data:
                copied_fit_kwargs['x'] = copied_fit_kwargs['x'].concatenate(
                    fit_kwargs['validation_data'])
                copied_fit_kwargs.pop('validation_data')

            model = self.final_fit(**copied_fit_kwargs)
        else:
            model = self.get_best_models()[0]

        model.save_weights(self.best_model_path)
        self._finished = True
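
When `fit_on_val_data` is set, the final fit merges the two datasets with `tf.data.Dataset.concatenate`, which is why `x` and `validation_data` are expected to be `tf.data.Dataset` objects here. A small self-contained demonstration of that operation:

import tensorflow as tf

train = tf.data.Dataset.range(6).batch(2)    # three training batches
val = tf.data.Dataset.range(6, 10).batch(2)  # two validation batches

# The same call the final fit performs on copied_fit_kwargs['x']:
combined = train.concatenate(val)
print(list(combined.as_numpy_iterator()))
# [array([0, 1]), array([2, 3]), array([4, 5]), array([6, 7]), array([8, 9])]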