Example #1
def test_alignment_centered():
    km1 = KernelMatrix(kernel=LinearKernel())
    km1.attach_to(gen_random_sample(num_samples, sample_dim))

    km2 = KernelMatrix(kernel=LinearKernel())
    km2.attach_to(gen_random_sample(num_samples, sample_dim))

    km3_bad_size = KernelMatrix(kernel=LinearKernel())
    km3_bad_size.attach_to(gen_random_sample(num_samples + 2, sample_dim))

    with raises(ValueError):
        alignment_centered(km1.full, km3_bad_size.full)

    # bad type for the first argument: must be an ndarray, not a KernelMatrix
    with raises(TypeError):
        alignment_centered(km1, km2.full)

    # bad type for the second argument: must be an ndarray, not a KernelMatrix
    with raises(TypeError):
        alignment_centered(km1.full, km2)

    for flag in (True, False):
        _ = alignment_centered(km1.full, km2.full, centered_already=flag)

    with raises(ValueError):
        _ = alignment_centered(np.zeros((10, 10)), randn(10, 10),
                               value_if_zero_division='raise')

    return_val_requested = 'random_set_value'
    with warns(UserWarning):
        ret_value = alignment_centered(randn(10, 10),
                                       np.zeros((10, 10)),
                                       value_if_zero_division=return_val_requested)
    if ret_value != return_val_requested:
        raise ValueError('Not returning the value requested in case of error!')
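
For reference, below is a minimal NumPy sketch of the standard centered kernel alignment formula that this test exercises. It is an illustrative re-derivation only, not necessarily the library's exact implementation of alignment_centered; the helper name centered_alignment_reference is introduced here for the sketch.

import numpy as np

def centered_alignment_reference(K1, K2):
    """A(K1, K2) = <Kc1, Kc2>_F / (||Kc1||_F * ||Kc2||_F), where Kc = H K H."""
    n = K1.shape[0]
    H = np.eye(n) - np.ones((n, n)) / n        # centering matrix
    Kc1, Kc2 = H @ K1 @ H, H @ K2 @ H          # center both kernel matrices
    numer = np.sum(Kc1 * Kc2)                  # Frobenius inner product
    denom = np.linalg.norm(Kc1, 'fro') * np.linalg.norm(Kc2, 'fro')
    if denom == 0:
        # mirrors the zero-division case probed above with value_if_zero_division='raise'
        raise ValueError('at least one centered kernel matrix is all zeros')
    return numer / denom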
Example #2
class BaseKernelMachine(BaseEstimator):
    """Generic class to return a drop-in sklearn estimator.

    Parameters
    ----------
    k_func : KernelFunction
        The kernel function the kernel machine bases itself on

    learner_id : str
        Identifier for the estimator to be built based on the kernel function.
        Options: ``SVC`` and ``SVR``.
        Default: ``SVC`` (classifier version of SVM)

    normalized : bool
        Flag to indicate whether to keep the kernel matrix normalized.
        Default: False

    """


    def __init__(self,
                 k_func=GaussianKernel(),
                 learner_id='SVC',
                 normalized=False):
        """
        Constructor for the BaseKernelMachine class.

        Parameters
        ----------
        k_func : KernelFunction
            The kernel function the kernel machine bases itself on

        learner_id : str
            Identifier for the estimator to be built based on the kernel function.
            Options: ``SVC`` and ``SVR``.
            Default: ``SVC`` (classifier version of SVM)

        normalized : bool
            Flag to indicate whether to keep the kernel matrix normalized.
            Default: False
        """

        self.k_func = k_func
        self.learner_id = learner_id
        self.normalized = normalized
        self._estimator, self.param_grid = get_estimator(self.learner_id)


    def fit(self, X, y, sample_weight=None):
        """Fit the chosen Estimator based on the user-defined kernel.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape (n_samples, n_features)
            Training vectors, where n_samples is the number of samples
            and n_features is the number of features.
            For kernel="precomputed", the expected shape of X is
            (n_samples, n_samples).

        y : array-like, shape (n_samples,)
            Target values (class labels in classification, real numbers in
            regression)

        sample_weight : array-like, shape (n_samples,)
            Per-sample weights. Rescale C per sample. Higher weights
            force the classifier to put more emphasis on these points.

        Returns
        -------
        self : object

        Notes
        ------
        If X and y are not C-ordered and contiguous arrays of np.float64 and
        X is not a scipy.sparse.csr_matrix, X and/or y may be copied.

        If X is a dense array, then the other methods will not support sparse
        matrices as input.

        """

        if is_regressor(self):
            self._train_X, self._train_y = check_X_y(X, y, y_numeric=True)
            self._train_y = self._train_y.astype(np.float64)
        else:
            self._train_X, self._train_y = check_X_y(X, y)

        self._km = KernelMatrix(self.k_func, name='train_km',
                                normalized=self.normalized)
        self._km.attach_to(self._train_X)

        self._estimator.fit(X=self._km.full, y=self._train_y,
                            sample_weight=sample_weight)

        if is_classifier(self):
            self.classes_ = self._estimator.classes_

        return self


    def predict(self, X):
        """
        Make predictions on the new samplets in X.

        For a one-class model, +1 or -1 is returned.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape (n_samples, n_features)
            For kernel="precomputed", the expected shape of X is
            [n_samples_test, n_samples_train]

        Returns
        -------
        y_pred : array, shape (n_samples,)
            Class labels for samples in X.
        """

        if not hasattr(self, '_km'):
            raise NotFittedError("Can't predict. Not fitted yet. Run .fit() first!")

        test_X = check_array(X)

        # build a fresh KernelMatrix for the test data
        self._km = KernelMatrix(self.k_func, name='test_km',
                                normalized=self.normalized)

        # sample_one must be test data to get the right shape for sklearn X
        self._km.attach_to(sample_one=test_X, sample_two=self._train_X)

        predicted_y = self._estimator.predict(self._km.full)

        return np.asarray(predicted_y, dtype=self._train_y.dtype)


    def get_params(self, deep=True):
        """returns all the relevant parameters for this estimator!"""

        return {'k_func'    : self.k_func,
                'normalized': self.normalized,
                'learner_id': self.learner_id}


    def set_params(self, **parameters):
        """Param setter"""

        for parameter, value in parameters.items():
            if parameter in ('k_func', 'learner_id', 'normalized'):
                setattr(self, parameter, value)

        return self
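
A hypothetical end-to-end usage sketch for BaseKernelMachine, based on the constructor and the fit/predict signatures documented above; the synthetic data from sklearn's make_classification is illustrative only.

from sklearn.datasets import make_classification

X, y = make_classification(n_samples=50, n_features=10, random_state=0)

clf = BaseKernelMachine(k_func=GaussianKernel(), learner_id='SVC', normalized=False)
clf.fit(X, y)               # builds the training kernel matrix and fits the SVC
y_pred = clf.predict(X)     # builds a test-vs-train kernel matrix internally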
Example #3
def test_centering():

    km = KernelMatrix(kernel=LinearKernel())
    km.attach_to(gen_random_sample(num_samples, sample_dim))
    km.center()
Example #4
def test_normalize():

    km = KernelMatrix(kernel=LinearKernel())
    km.attach_to(gen_random_sample(num_samples, sample_dim))
    km.normalize()
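
For context, the standard cosine normalization of a kernel matrix, shown as a plain NumPy reference; it is an assumption, not confirmed by this excerpt, that KernelMatrix.normalize follows this exact formula.

import numpy as np

def cosine_normalize_reference(K):
    """K_norm[i, j] = K[i, j] / sqrt(K[i, i] * K[j, j]); the diagonal becomes all ones."""
    d = np.sqrt(np.diag(K))
    return K / np.outer(d, d)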
Example #5
def gen_random_array(dim):
    """To better control precision and type of floats"""

    # TODO input sparse arrays for test
    return np.random.rand(dim)


def gen_random_sample(num_samples, sample_dim):
    """To better control precision and type of floats"""

    # TODO input sparse arrays for test
    return np.random.rand(num_samples, sample_dim)


km_lin = KernelMatrix(kernel=LinearKernel())
km_lin.attach_to(gen_random_sample(num_samples, sample_dim))


def simple_callable(x, y):
    return np.dot(x, y)


def test_kernel_from_callable():

    kf = KernelFromCallable(simple_callable)
    if not isinstance(kf, BaseKernelFunction):
        raise TypeError('Error in implementation of KernelFromCallable')

    _test_for_all_kernels(kf, 5)
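
A small hypothetical check of the wrapped kernel, assuming (as with other BaseKernelFunction subclasses) that the resulting object can be evaluated directly on two 1D vectors; this callable behavior is an assumption not confirmed by the excerpt above.

x = gen_random_array(sample_dim)
y = gen_random_array(sample_dim)
kf = KernelFromCallable(simple_callable)
assert np.isclose(kf(x, y), simple_callable(x, y))   # wrapped kernel should match the raw callable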