Example #1
    def test_log_prob(self):
        mu = ch.Variable(np.array(0.))
        sigma = ch.Variable(np.array(1.))
        cond_value = ch.Variable(np.array(0.5))

        x = StochasticVariable(dist.Normal, mu, sigma)
        x.condition(cond_value)
        self.assertEqual(x.log_prob.data,
                         dist.Normal(mu, sigma).log_prob(cond_value).data)
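For reference, the identity under test can be checked directly against the public chainer.distributions API; StochasticVariable is assumed to come from the project under test and is not part of Chainer itself. A minimal standalone sketch:

    import numpy as np
    import chainer
    import chainer.distributions as dist

    mu = chainer.Variable(np.array(0., dtype=np.float32))
    sigma = chainer.Variable(np.array(1., dtype=np.float32))
    value = chainer.Variable(np.array(0.5, dtype=np.float32))

    log_p = dist.Normal(mu, scale=sigma).log_prob(value)
    print(log_p.data)  # log N(0.5 | mu=0, sigma=1), approximately -1.0439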
Example #2
 def to_dist_fn(h):
     loc, scale = h
     xp = cuda.get_array_module(loc, scale)
     return distributions.HyperbolicWrapped(
         distributions.Independent(
             D.Normal(loc=xp.zeros(shape=scale.shape,
                                   dtype=scale.dtype),
                      scale=scale)),
         functions.pseudo_polar_projection(loc))
Example #3
 def to_dist_fn(h):
     xp = cuda.get_array_module(h)
     scale = F.softplus(h[..., n_latent:])
     shape = scale.shape[:-1] + (n_latent, )
     return distributions.HyperbolicWrapped(
         distributions.Independent(
             D.Normal(loc=xp.zeros(shape=shape, dtype=scale.dtype),
                      scale=scale)),
         functions.pseudo_polar_projection(h[..., :n_latent]))
Example #4
 def to_dist_fn(h):
     mu, ln_sigma = h
     xp = cuda.get_array_module(*h)
     scale = F.softplus(ln_sigma)
     return distributions.HyperbolicWrapped(
         distributions.Independent(D.Normal(
             loc=xp.zeros(shape=scale.shape, dtype=scale.dtype),
             scale=scale)),
         functions.pseudo_polar_projection(mu))
Example #5
    def __call__(self, x):
        h = x
        for layer in self._hidden_layers:
            h = F.relu(layer(h))
        #for
        theta = F.sigmoid(self._output_layer(h))

        mu    = 4*theta[:, 0]-2
        sigma = 0.5*theta[:, 1]+self._eps
        return D.Normal(mu, sigma)
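The sigmoid outputs are rescaled so the network can only produce means in (-2, 2) and scales in (self._eps, 0.5 + self._eps). A quick numpy check of those ranges, with 1e-3 standing in for the (hypothetical) value of self._eps:

    import numpy as np

    theta = np.array([0.0, 0.5, 1.0])  # endpoints and midpoint of the sigmoid range
    print(4 * theta - 2)               # [-2.  0.  2.]       -> range of the mean
    print(0.5 * theta + 1e-3)          # [0.001 0.251 0.501] -> range of the scale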
Example #6
 def make_normal_dist(self, is_gpu=False, use_log_scale=False):
     loc = numpy.random.uniform(-1, 1, self.shape).astype(numpy.float32)
     if use_log_scale:
         log_scale = numpy.random.uniform(
             -1, 1, self.shape).astype(numpy.float32)
         params = self.encode_params(
             {"loc": loc, "log_scale": log_scale}, is_gpu)
     else:
         scale = numpy.exp(
             numpy.random.uniform(-1, 1, self.shape)).astype(numpy.float32)
         params = self.encode_params({"loc": loc, "scale": scale}, is_gpu)
     return distributions.Normal(**params)
Example #7
    def negative_log_likelihood(self, x, y):
        pi, mu, log_var = self.get_gaussian_params(x)

        # Likelihood over different Gaussians
        y = F.tile(y[:, None, :], (1, self.gaussian_mixtures, 1))
        pi = F.tile(F.expand_dims(pi, 2), (1, 1, self.input_dim))

        squared_sigma = F.exp(log_var)
        sigma = F.sqrt(squared_sigma)
        prob = F.sum(pi * distributions.Normal(mu, sigma).prob(y), axis=1)

        negative_log_likelihood = -F.log(prob)
        return F.mean(negative_log_likelihood)
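Summing raw component densities as above can underflow when every component assigns the target a tiny probability. A numerically safer variant of the same mixture likelihood works in log space via logsumexp; this is a sketch under assumed (batch, mixtures, dim) shapes, not the original project's code:

    import numpy as np
    import chainer.functions as F
    from chainer import distributions

    pi = np.full((2, 3, 4), 1. / 3, dtype=np.float32)  # tiled mixture weights
    mu = np.zeros((2, 3, 4), dtype=np.float32)
    sigma = np.ones((2, 3, 4), dtype=np.float32)
    y = np.zeros((2, 3, 4), dtype=np.float32)

    log_prob = distributions.Normal(mu, scale=sigma).log_prob(y)
    log_mix = F.logsumexp(F.log(pi) + log_prob, axis=1)  # log sum_k pi_k N_k(y)
    nll = -F.mean(log_mix)
    print(nll.data)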
Example #8
    def encode(self, x, **kwargs):
        self.in_shape = x.shape[1:]
        self.maybe_init(self.in_shape)

        x_ = x.reshape(-1, self.in_size)
        mu = self.mu(x_)

        if kwargs.get('show_shape'):
            print(f'layer(E{self.name}): in: {x.shape} out: {mu.shape}')

        if kwargs.get('inference'):
            return mu  # == D.Normal(loc=mu, log_scale=ln_sigma).mean
        else:
            ln_sigma = self.ln_sigma(x_)  # log(sigma)
            return D.Normal(loc=mu, log_scale=ln_sigma)
Example #9
 def to_dist_fn(h):
     if ndim == 1:
         dim_h = h.shape[-1]
         loc = h[..., :dim_h // 2]
         base_sigma = h[..., dim_h // 2:]
     elif ndim == 3:
         nb_channel = h.shape[-3]
         loc = h[..., :nb_channel // 2, :, :]
         base_sigma = h[..., nb_channel // 2:, :, :]
     else:
         raise NotImplementedError
     # Shift so that softplus(base_sigma) starts near 1, then clamp so the
     # resulting scale never drops below 0.001.
     base_sigma += xp_functions._softplus_inverse(1.0)
     return distributions.Independent(
         D.Normal(loc=loc,
                  scale=F.softplus(functions.clamp(
                      base_sigma, xp_functions._softplus_inverse(0.001)))),
         reinterpreted_batch_ndims=ndim)
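The _softplus_inverse shift above makes softplus(base_sigma) start near 1 for zero-initialized outputs, and the clamp floors the resulting scale at 0.001. The helper is simply the inverse of softplus(x) = log(1 + exp(x)); a plain-numpy sketch of what such a helper computes (the actual xp_functions implementation may differ):

    import numpy as np

    def softplus_inverse(y):
        # softplus(softplus_inverse(y)) == y for y > 0
        return np.log(np.expm1(y))

    x = softplus_inverse(1.0)
    print(np.log1p(np.exp(x)))  # softplus(x), approximately 1.0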
Example #10
    def setUp_configure(self):
        from scipy import stats
        self.dist = lambda **params: distributions.Independent(
            distributions.Normal(**params), self.reinterpreted_batch_ndims)

        self.test_targets = set(
            ["batch_shape", "entropy", "event_shape", "log_prob", "support"])

        loc = utils.force_array(
            numpy.random.uniform(-1, 1, self.full_shape).astype(numpy.float32))
        scale = utils.force_array(
            numpy.exp(numpy.random.uniform(-1, 1, self.full_shape)).astype(
                numpy.float32))

        if self.reinterpreted_batch_ndims is None:
            reinterpreted_batch_ndims = max(0, len(self.inner_shape) - 1)
        else:
            reinterpreted_batch_ndims = self.reinterpreted_batch_ndims

        batch_ndim = len(self.inner_shape) - reinterpreted_batch_ndims
        self.shape = self.inner_shape[:batch_ndim]
        self.event_shape = \
            self.inner_shape[batch_ndim:] + self.inner_event_shape
        d = functools.reduce(operator.mul, self.event_shape, 1)

        if self.event_shape == ():
            self.scipy_dist = stats.norm

            self.params = {"loc": loc, "scale": scale}
            self.scipy_params = {"loc": loc, "scale": scale}

        else:
            self.scipy_dist = stats.multivariate_normal

            scale_tril = numpy.eye(d).astype(numpy.float32) * \
                scale.reshape(self.shape + (d,))[..., None]
            cov = numpy.einsum('...ij,...jk->...ik', scale_tril, scale_tril)

            self.params = {"loc": loc, "scale": scale}
            self.scipy_params = {
                "mean": numpy.reshape(loc, self.shape + (d, )),
                "cov": cov
            }
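The scipy mapping above relies on the fact that Independent(Normal(loc, scale), k) over the trailing k dimensions has the same log-density as a multivariate normal with diagonal covariance diag(scale**2) over those dimensions flattened. A quick standalone check with scipy alone:

    import numpy as np
    from scipy import stats

    loc = np.zeros(3)
    scale = np.array([0.5, 1.0, 2.0])
    x = np.array([0.1, -0.2, 0.3])

    indep = stats.norm(loc, scale).logpdf(x).sum()  # sum of per-dimension logs
    mvn = stats.multivariate_normal(loc, np.diag(scale ** 2)).logpdf(x)
    print(np.allclose(indep, mvn))  # True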
Example #11
    def __call__(self, x):
        h = F.relu(self.c1(x))
        h = F.average_pooling_2d(h, 2, 2, 0)

        h = F.relu(self.c2(h))
        h = F.average_pooling_2d(h, 2, 2, 0)

        h = F.relu(self.c3(h))
        h = F.average_pooling_2d(h, 2, 2, 0)

        h = F.relu(self.c4(h))
        h = F.average_pooling_2d(h, 2, 2, 0)

        h = F.relu(self.c5(h))
        h = F.average_pooling_2d(h, 2, 2, 0)

        h = F.relu(self.c6(h))

        h = self.c7(h)

        # The first 256 channels parameterize the mean, the rest the log-scale.
        return D.Normal(loc=h[:, :256], log_scale=h[:, 256:])
Example #12
    def __call__(self, x):
        h = F.relu(self.bn_c1(self.c1(x)))
        h = F.average_pooling_2d(h, 2, 2, 0)

        h = F.relu(self.res2(h))
        h = F.average_pooling_2d(h, 2, 2, 0)

        h = F.relu(self.res3(h))
        h = F.average_pooling_2d(h, 2, 2, 0)

        h = F.relu(self.res4(h))
        h = F.average_pooling_2d(h, 2, 2, 0)

        h = F.relu(self.res5(h))

        h = self.bn_l6(self.l6(h))

        # Split the output into mean and log-scale halves.
        return D.Normal(loc=h[:, :self.n_ch * 4],
                        log_scale=h[:, self.n_ch * 4:])
Example #13
 def to_dist_fn(h):
     return distributions.Independent(
         D.Normal(loc=h[..., :n_latent],
                  scale=F.softplus(h[..., n_latent:])))
Example #14
 def to_dist_fn(h):
     mu, ln_sigma = h
     return distributions.Independent(D.Normal(
         loc=mu, scale=F.softplus(ln_sigma)))
Example #15
 def __call__(self):
     return D.Normal(self.loc, scale=self.scale)
Example #16
 def to_dist_fn(h):
     loc, scale = h
     return distributions.Independent(D.Normal(loc, scale),
                                      reinterpreted_batch_ndims=1)
Example #17
    def normal_prob(self, y, mu, log_var):
        sigma = F.sqrt(F.exp(log_var))  # sigma = exp(log_var / 2)
        d = distributions.Normal(mu, scale=sigma)
        # Note: a normal density can exceed 1 when sigma < 1/sqrt(2*pi), so
        # this clip truncates the density rather than merely guarding bounds.
        return F.clip(d.prob(y), 0., 1.)
Example #18
 def __call__(self, x):
     h = F.tanh(self.fc1(x))
     ave = self.fc2_ave(h)
     ln_sigma = self.fc2_var(h)  # log(sigma), despite the layer's name
     return D.Normal(loc=ave, log_scale=ln_sigma)
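Passing log_scale instead of scale simply parameterizes the standard deviation as exp(log_scale); the two forms define the same distribution. A quick equivalence check:

    import numpy as np
    from chainer import distributions as D

    loc = np.zeros(1, dtype=np.float32)
    ln_sigma = np.array([0.3], dtype=np.float32)

    a = D.Normal(loc, log_scale=ln_sigma).log_prob(loc)
    b = D.Normal(loc, scale=np.exp(ln_sigma)).log_prob(loc)
    print(np.allclose(a.data, b.data))  # True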
Example #19
 def test_batch_ndim_error(self):
     with self.assertRaises(ValueError):
         distributions.Independent(distributions.Normal(**self.params),
                                   len(self.inner_shape) + 1)
Example #20
 def forward(self):
     return D.Normal(self.loc, scale=self.scale)
Example #21
 def forward(self, x):
     h = F.tanh(self.linear(x))
     mu = self.mu(h)
     ln_sigma = self.ln_sigma(h)  # log(sigma)
     return D.Independent(D.Normal(loc=mu, log_scale=ln_sigma))
Example #22
 def forward(self):
     return D.Independent(D.Normal(self.loc, scale=self.scale),
                          reinterpreted_batch_ndims=1)