Example #1
 def sample(self, nsamples):
     # Draw a component index ("father") for each sample from the categorical
     # prior, then draw the sample itself from that component's Gaussian.
     samples = []
     fathers = self._sample_prior(nsamples=nsamples).tolist()
     for father in fathers:
         samples.append(self._sample_gaussian(self.means[father],
                                              self.variances[father]))
     return as_array(samples), as_array(fathers)
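A minimal usage sketch of this method (illustrative only; it assumes the surrounding class is the GaussianMixtureDistribution shown in the constructor example below, and the draw count of 500 is arbitrary):

import numpy as np

gm = GaussianMixtureDistribution()          # defaults: 5 components in 2-D
points, components = gm.sample(500)
print(points.shape)       # (500, 2): one 2-D point per draw
print(components.shape)   # (500,): index of the component each point came from
print(np.bincount(np.asarray(components, dtype=int)))  # roughly uniform under the flat prior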
Example #2
    def __init__(self, means=None, variances=None, priors=None, rng=None, seed=None):

        if means is None:
            means = [10.0 * as_array(x) for x in [[0, 0],
                                                  [1, 1],
                                                  [-1, -1],
                                                  [1, -1],
                                                  [-1, 1]]]
        # Number of components
        self.ncomponents = len(means)
        self.dim = means[0].shape[0]
        self.means = means
        # If prior is not specified let prior be flat.
        if priors is None:
            priors = [1.0/self.ncomponents for _ in range(self.ncomponents)]
        self.priors = priors
        # If variances are not specified let variances be identity
        if variances is None:
            variances = [np.eye(self.dim) for _ in range(self.ncomponents)]
        self.variances = variances

        assert len(means) == len(variances), "Number of means must match number of variances"
        assert len(variances) == len(priors), "Number of variances must match number of priors"

        if rng is None:
            rng = npr.RandomState(seed=seed)
        self.rng = rng
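A short sketch of what the defaults above produce (illustrative; it assumes as_array behaves like np.asarray):

# With no arguments the constructor builds a 2-D mixture of five Gaussians whose
# means are the origin and the four corners (+/-10, +/-10), each with identity
# covariance and a flat prior weight of 1/5.
gm = GaussianMixtureDistribution()
print(gm.ncomponents)   # 5
print(gm.dim)           # 2
print(gm.means[1])      # [10. 10.]
print(gm.priors[0])     # 0.2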
Example #3
                              alpha=0.3)
    samples_latent_ax.set_title('Latent space. Epoch {}'.format(epoch))

    # plt.legend(loc="upper left", bbox_to_anchor=[0, 1],
    #            shadow=True, title="Legend", fancybox=True)
    # latent_ax.get_legend()
    plt.tight_layout()

    if save_path is None:
        plt.show()
    else:
        plt.savefig(save_path, transparent=True, bbox_inches='tight')


if __name__ == '__main__':
    means = [as_array(x)
             for x in [[0, 0], [1, 1], [-1, -1], [1, -1], [-1, 1]]]
    std = 0.01
    variances = [np.eye(2) * std for _ in means]
    priors = [1.0 / len(means) for _ in means]

    gaussian_mixture = GaussianMixtureDistribution(means=means,
                                                   variances=variances,
                                                   priors=priors)
    originals, labels = gaussian_mixture.sample(1000)
    reconstructions = originals * np.random.normal(size=originals.shape,
                                                   scale=0.05)
    encodings = np.random.normal(size=(1000, 2))
    train_data = {
        'originals': originals,
        'labels': labels,
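The plotting helper that consumes this train_data dict is cut off above; a minimal self-contained sketch of the same idea (scatter the sampled points coloured by their component label, following the same save_path convention) could look like this. scatter_by_component is a hypothetical name, not the project's function:

import matplotlib.pyplot as plt

def scatter_by_component(points, labels, save_path=None):
    # One scatter call; matplotlib maps the integer labels to colours via c=.
    fig, ax = plt.subplots()
    ax.scatter(points[:, 0], points[:, 1], c=labels, s=10, alpha=0.3)
    ax.set_title('Gaussian mixture samples by component')
    plt.tight_layout()
    if save_path is None:
        plt.show()
    else:
        plt.savefig(save_path, transparent=True, bbox_inches='tight')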
Example #4
def mouseevent_to_nparray(event):
    # Pack the click's data-space coordinates (event.xdata, event.ydata) into an array.
    return as_array((event.xdata, event.ydata))
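A sketch of how such a converter is typically hooked into matplotlib's event loop (the figure and handler here are illustrative; only mpl_connect and the xdata/ydata attributes are standard matplotlib API):

import matplotlib.pyplot as plt

fig, ax = plt.subplots()

def on_click(event):
    # xdata/ydata are None when the click lands outside the axes.
    if event.xdata is None or event.ydata is None:
        return
    print('clicked at', mouseevent_to_nparray(event))

fig.canvas.mpl_connect('button_press_event', on_click)
plt.show()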
Example #5
        # Sample a standard-normal vector and apply the affine transform:
        # if L is the Cholesky factor of the covariance (L L^T = variance),
        # then mean + L.dot(epsilons) is distributed N(mean, variance).
        epsilons = self.rng.normal(size=(self.dim, ))

        return mean + np.linalg.cholesky(variance).dot(epsilons)

    def _gaussian_pdf(self, x, mean, variance):
        return multivariate_normal.pdf(x, mean=mean, cov=variance)

    def pdf(self, x):
        "Evaluates the probability density function at the given point x"
        # Mixture density: prior-weighted sum of the component Gaussian densities.
        pdfs = map(lambda m, v, p: p * self._gaussian_pdf(x, m, v),
                   self.means, self.variances, self.priors)
        return sum(pdfs)


if __name__ == '__main__':
    means = [as_array(x) for x in [[0, 0],
                                   [1, 1],
                                   [-1, -1],
                                   [1, -1],
                                   [-1, 1]]]
    std = 0.01
    variances = [np.eye(2) * std for _ in means]
    priors = [1.0/len(means) for _ in means]

    gaussian_mixture = GaussianMixtureDistribution(means=means,
                                                   variances=variances,
                                                   priors=priors)
    gmdset = GaussianMixture(1000, means, variances, priors, sources=('features', ))
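As a follow-up sanity check on the pdf method above (illustrative; it reuses the gaussian_mixture built in this __main__ block), the density at a component mean is dominated by that component's term:

    # At the mean [0, 0] the mixture density is roughly
    # prior / (2 * pi * std) = 0.2 / (2 * pi * 0.01), i.e. about 3.2, since the
    # other components' densities are negligible at this distance (note that
    # `std` above actually scales the covariance matrix, not the standard deviation).
    print(gaussian_mixture.pdf(as_array([0.0, 0.0])))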