Example #1
def test_sample_gaussian():
    """
    Test sample generation from mixture.sample_gaussian where covariance
    is diagonal, spherical and full
    """

    n_features, n_samples = 2, 300
    axis = 1
    mu = rng.randint(10) * rng.rand(n_features)
    cv = (rng.rand(n_features) + 1.0) ** 2

    samples = mixture.sample_gaussian(
        mu, cv, cvtype='diag', n_samples=n_samples)

    assert np.allclose(samples.mean(axis), mu, atol=1.3)
    assert np.allclose(samples.var(axis),  cv, atol=1.5)

    # the same for spherical covariances
    cv = (rng.rand() + 1.0) ** 2
    samples = mixture.sample_gaussian(
        mu, cv, cvtype='spherical', n_samples=n_samples)

    assert np.allclose(samples.mean(axis), mu, atol=1.5)
    assert np.allclose(
        samples.var(axis), np.repeat(cv, n_features), atol=1.5)

    # and for full covariances
    A = rng.randn(n_features, n_features)
    cv = np.dot(A.T, A) + np.eye(n_features)
    samples = mixture.sample_gaussian(
        mu, cv, cvtype='full', n_samples=n_samples)
    assert np.allclose(samples.mean(axis), mu, atol=1.3)
    assert np.allclose(np.cov(samples), cv, atol=2.5)
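The checks above only look at sample moments along axis=1, i.e. the draws are laid out as (n_features, n_samples). A minimal NumPy sketch of a diagonal-covariance draw with that layout (sample_diag_gaussian is an illustrative helper, not part of sklearn):

import numpy as np

def sample_diag_gaussian(mu, cv, n_samples=1, random_state=None):
    # Diagonal covariance: scale standard normals by the per-feature
    # standard deviations sqrt(cv); a spherical covariance is the same
    # draw with one shared variance for every feature.
    rng = np.random.RandomState(random_state)
    mu = np.asarray(mu, dtype=float)
    cv = np.asarray(cv, dtype=float)               # per-feature variances
    z = rng.randn(mu.shape[0], n_samples)          # (n_features, n_samples)
    return mu[:, np.newaxis] + np.sqrt(cv)[:, np.newaxis] * z

samples = sample_diag_gaussian([1.0, 2.0], [0.5, 2.0], n_samples=300,
                               random_state=0)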
Example #2
def test_sample_gaussian():
    # Test sample generation from mixture.sample_gaussian where covariance
    # is diagonal, spherical and full

    n_features, n_samples = 2, 300
    axis = 1
    mu = rng.randint(10) * rng.rand(n_features)
    cv = (rng.rand(n_features) + 1.0) ** 2

    samples = mixture.sample_gaussian(mu, cv, covariance_type="diag", n_samples=n_samples)

    assert_true(np.allclose(samples.mean(axis), mu, atol=1.3))
    assert_true(np.allclose(samples.var(axis), cv, atol=1.5))

    # the same for spherical covariances
    cv = (rng.rand() + 1.0) ** 2
    samples = mixture.sample_gaussian(mu, cv, covariance_type="spherical", n_samples=n_samples)

    assert_true(np.allclose(samples.mean(axis), mu, atol=1.5))
    assert_true(np.allclose(samples.var(axis), np.repeat(cv, n_features), atol=1.5))

    # and for full covariances
    A = rng.randn(n_features, n_features)
    cv = np.dot(A.T, A) + np.eye(n_features)
    samples = mixture.sample_gaussian(mu, cv, covariance_type="full", n_samples=n_samples)
    assert_true(np.allclose(samples.mean(axis), mu, atol=1.3))
    assert_true(np.allclose(np.cov(samples), cv, atol=2.5))

    # Numerical stability check: in SciPy 0.12.0 at least, eigh may return
    # tiny negative values in its second return value.
    from sklearn.mixture import sample_gaussian

    x = sample_gaussian([0, 0], [[4, 3], [1, 0.1]], covariance_type="full", random_state=42)
    assert_true(np.isfinite(x).all())
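The final check above guards against eigh returning tiny negative eigenvalues for an ill-conditioned covariance. A minimal sketch of the idea behind a full-covariance draw, with the eigenvalues clipped at zero before taking the square root (sample_full_gaussian is an illustrative helper, not the sklearn function):

import numpy as np

def sample_full_gaussian(mean, cov, n_samples=1, random_state=None):
    # Symmetrize, eigendecompose, clip small negative eigenvalues that can
    # appear through floating-point error, then colour standard normals.
    rng = np.random.RandomState(random_state)
    mean = np.asarray(mean, dtype=float)
    cov = np.asarray(cov, dtype=float)
    eigvals, eigvecs = np.linalg.eigh((cov + cov.T) / 2.0)
    eigvals = np.clip(eigvals, 0.0, None)
    z = rng.randn(n_samples, mean.shape[0])        # (n_samples, n_features)
    return mean + (z * np.sqrt(eigvals)) @ eigvecs.T

x = sample_full_gaussian([0, 0], [[4, 3], [1, 0.1]], n_samples=5,
                         random_state=42)
assert np.isfinite(x).all()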
Example #3
def test_sample_gaussian():
    """
    Test sample generation from mixture.sample_gaussian where covariance
    is diagonal, spherical and full
    """

    n_features, n_samples = 2, 300
    axis = 1
    mu = rng.randint(10) * rng.rand(n_features)
    cv = (rng.rand(n_features) + 1.0) ** 2

    samples = mixture.sample_gaussian(
        mu, cv, covariance_type='diag', n_samples=n_samples)

    assert_true(np.allclose(samples.mean(axis), mu, atol=1.3))
    assert_true(np.allclose(samples.var(axis), cv, atol=1.5))

    # the same for spherical covariances
    cv = (rng.rand() + 1.0) ** 2
    samples = mixture.sample_gaussian(
        mu, cv, covariance_type='spherical', n_samples=n_samples)

    assert_true(np.allclose(samples.mean(axis), mu, atol=1.5))
    assert_true(np.allclose(
        samples.var(axis), np.repeat(cv, n_features), atol=1.5))

    # and for full covariances
    A = rng.randn(n_features, n_features)
    cv = np.dot(A.T, A) + np.eye(n_features)
    samples = mixture.sample_gaussian(
        mu, cv, covariance_type='full', n_samples=n_samples)
    assert_true(np.allclose(samples.mean(axis), mu, atol=1.3))
    assert_true(np.allclose(np.cov(samples), cv, atol=2.5))
Example #4
def test_sample_gaussian():
    """
    Test sample generation from mixture.sample_gaussian where covariance
    is diagonal, spherical and full
    """

    n_features, n_samples = 2, 300
    axis = 1
    mu = rng.randint(10) * rng.rand(n_features)
    cv = (rng.rand(n_features) + 1.0)**2

    samples = mixture.sample_gaussian(mu,
                                      cv,
                                      covariance_type='diag',
                                      n_samples=n_samples)

    assert_true(np.allclose(samples.mean(axis), mu, atol=1.3))
    assert_true(np.allclose(samples.var(axis), cv, atol=1.5))

    # the same for spherical covariances
    cv = (rng.rand() + 1.0)**2
    samples = mixture.sample_gaussian(mu,
                                      cv,
                                      covariance_type='spherical',
                                      n_samples=n_samples)

    assert_true(np.allclose(samples.mean(axis), mu, atol=1.5))
    assert_true(
        np.allclose(samples.var(axis), np.repeat(cv, n_features), atol=1.5))

    # and for full covariances
    A = rng.randn(n_features, n_features)
    cv = np.dot(A.T, A) + np.eye(n_features)
    samples = mixture.sample_gaussian(mu,
                                      cv,
                                      covariance_type='full',
                                      n_samples=n_samples)
    assert_true(np.allclose(samples.mean(axis), mu, atol=1.3))
    assert_true(np.allclose(np.cov(samples), cv, atol=2.5))

    # Numerical stability check: in SciPy 0.12.0 at least, eigh may return
    # tiny negative values in its second return value.
    from sklearn.mixture import sample_gaussian
    x = sample_gaussian([0, 0], [[4, 3], [1, .1]],
                        covariance_type='full',
                        random_state=42)
    print(x)
    assert_true(np.isfinite(x).all())
Example #5
    def _generate_sample_from_state(self, state, random_state=None):
        if self._covariance_type == 'tied':
            cv = self._covars_
        else:
            cv = self._covars_[state]
        return sample_gaussian(self._means_[state], cv, self._covariance_type,
                               random_state=random_state)
Example #6
    def execute(self, activation):
        self.activation = self.activation_fun(activation)
        # self.activation = max(0.01, self.activation)
        if self.t >= self.t_last_voc + 0 and self.activation > rand():
            self.m = sample_gaussian(self.mean, self.covar)
            self.t_last_voc = deepcopy(self.t)
        else:
            self.m = None
        self.t += 1
        return self.m
Example #7
    def _generate_sample_from_state(self, state, random_state=None):
        if random_state is None:
            random_state = self.random_state
        random_state = check_random_state(random_state)

        cur_means = self.means_[state]
        cur_covs = self.covars_[state]
        cur_weights = self.weights_[state]

        i_gauss = random_state.choice(self.n_mix, p=cur_weights)
        mean = cur_means[i_gauss]
        if self.covariance_type == 'tied':
            cov = cur_covs
        else:
            cov = cur_covs[i_gauss]

        return sample_gaussian(mean, cov, self.covariance_type,
                               random_state=random_state)
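The state emission above is a two-step draw from a Gaussian mixture: pick a component index with probability given by the state's mixture weights, then sample from that component's Gaussian. A self-contained sketch of the same idea with plain NumPy (the names below are illustrative, not the hmmlearn API):

import numpy as np

def sample_from_mixture(means, covs, weights, random_state=None):
    # Choose a mixture component by its weight, then draw from that
    # component's full-covariance Gaussian.
    rng = np.random.RandomState(random_state)
    i = rng.choice(len(weights), p=weights)
    return rng.multivariate_normal(means[i], covs[i])

means = np.array([[0.0, 0.0], [5.0, 5.0]])
covs = np.array([np.eye(2), 2.0 * np.eye(2)])
weights = [0.3, 0.7]
x = sample_from_mixture(means, covs, weights, random_state=0)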
Example #8
    def _generate_sample_from_state(self, state, random_state=None):
        if random_state is None:
            random_state = self.random_state
        random_state = check_random_state(random_state)

        cur_means = self.means_[state]
        cur_covs = self.covars_[state]
        cur_weights = self.weights_[state]

        i_gauss = random_state.choice(self.n_mix, p=cur_weights)
        mean = cur_means[i_gauss]
        if self.covariance_type == 'tied':
            cov = cur_covs
        else:
            cov = cur_covs[i_gauss]

        return sample_gaussian(mean, cov, self.covariance_type,
                               random_state=random_state)
Example #9
    def infer(self, in_dims, out_dims, x):
        if self.t < n_neighbors:
            raise ExplautoBootstrapError
        if in_dims == self.m_dims and out_dims == self.s_dims:  # forward
            dists, indexes = self.dataset.nn_x(x, k=1)
            return self.dataset.get_y(indexes[0])

        elif in_dims == self.s_dims and out_dims == self.m_dims:  # inverse
            if self.mode == 'explore':
                if not self.to_explore:
                    self.current_goal = x
                    dists, indexes = self.dataset.nn_y(x, k=1)
                    self.mean_explore = self.dataset.get_x(indexes[0])
                    self.to_explore = self.n_explore
                self.to_explore -= 1
                return sample_gaussian(self.mean_explore, self.sigma_expl ** 2)
            else:  # exploit
                dists, indexes = self.dataset.nn_y(x, k=1)
                return self.dataset.get_x(indexes[0])

        else:
            raise NotImplementedError("NearestNeighbor only implements forward"
                                      "(M -> S) and inverse (S -> M) model, "
                                      "not general prediction")
Example #10
    def infer(self, in_dims, out_dims, x):
        if self.t < n_neighbors:
            raise ExplautoBootstrapError
        if in_dims == self.m_dims and out_dims == self.s_dims:  # forward
            dists, indexes = self.dataset.nn_x(x, k=1)
            return self.dataset.get_y(indexes[0])

        elif in_dims == self.s_dims and out_dims == self.m_dims:  # inverse
            if self.mode == 'explore':
                if not self.to_explore:
                    self.current_goal = x
                    dists, indexes = self.dataset.nn_y(x, k=1)
                    self.mean_explore = self.dataset.get_x(indexes[0])
                    self.to_explore = self.n_explore
                self.to_explore -= 1
                return sample_gaussian(self.mean_explore, self.sigma_expl**2)
            else:  # exploit
                dists, indexes = self.dataset.nn_y(x, k=1)
                return self.dataset.get_x(indexes[0])

        else:
            raise NotImplementedError("NearestNeighbor only implements forward "
                                      "(M -> S) and inverse (S -> M) model, "
                                      "not general prediction")
Example #11
    def sample(self):
        return sample_gaussian(self.mean, self.covar)