Example No. 1
    def negative_iwae_bound_for(self, x, y, c, iw):
        """
        Computes the Importance Weighted Autoencoder Bound
        Additionally, we also compute the ELBO KL and reconstruction terms

        Returns:
            niwae: tensor: (): Negative IWAE bound
            kl: tensor: (): ELBO KL divergence to prior
            rec: tensor: (): ELBO Reconstruction term
        """
        # encode

        qm, qv = self.enc.encode(x, y=y, c=c)

        # replicate qm, qv
        q_shape = list(qm.shape)
        qm = qm.unsqueeze(1).expand(q_shape[0], iw, *q_shape[1:])
        qv = qv.unsqueeze(1).expand(q_shape[0], iw, *q_shape[1:])

        # sample z(1)...z(iw) for a Monte Carlo estimate of p(x | z)
        z = ut.sample_gaussian(qm, qv)

        kl_elem = self.kl_elementwise(z, qm, qv)

        # flatten the iw dimension into the batch for the LSTM decoder,
        # then replicate x, y, c to match
        z_shape = list(z.shape)
        z = z.reshape(z_shape[0] * iw, *z_shape[2:])

        x_shape = list(x.shape)
        x = x.unsqueeze(1).expand(x_shape[0], iw, *x_shape[1:])
        x = x.reshape(x_shape[0] * iw, *x_shape[1:])

        if y is not None:
            y_shape = list(y.shape)
            y = y.unsqueeze(1).expand(y_shape[0], iw, *y_shape[1:])
            y = y.reshape(y_shape[0] * iw, *y_shape[1:])
        if c is not None:
            c_shape = list(c.shape)
            c = c.unsqueeze(1).expand(c_shape[0], iw, *c_shape[1:])
            c = c.reshape(c_shape[0] * iw, *c_shape[1:])

        # decode
        mu, var = self.dec.decode(z, y=y, c=c)

        nll, rec_mse, rec_var = ut.nlog_prob_normal(mu=mu,
                                                    y=x,
                                                    var=var,
                                                    fixed_var=self.warmup,
                                                    var_pen=self.var_pen)
        log_prob, rec_mse, rec_var = -nll, rec_mse.mean(), rec_var.mean()

        log_prob = log_prob.view(x_shape[0], iw)
        niwae = -ut.log_mean_exp(log_prob - kl_elem, dim=1).mean(-1)

        # reduce
        rec = -log_prob.mean(1).mean(-1)
        kl = kl_elem.mean(1).mean(-1)
        return niwae, kl, rec, rec_mse, rec_var
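
All of these examples lean on a small utility module `ut`. As a point of reference, here is a minimal sketch of what the two helpers used above might look like: reparameterized sampling from a diagonal Gaussian, and a numerically stable log-mean-exp. This is an assumption about `ut`, not its actual source.

    import math
    import torch

    def sample_gaussian(m, v):
        # Reparameterization trick: z = m + sqrt(v) * eps with eps ~ N(0, I).
        # m and v are the mean and diagonal variance, same (broadcastable) shape.
        return m + torch.sqrt(v) * torch.randn_like(m)

    def log_mean_exp(x, dim):
        # log(mean(exp(x), dim)) computed stably via logsumexp.
        return torch.logsumexp(x, dim=dim) - math.log(x.size(dim))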
Example No. 2
    def negative_elbo_bound(self, x):
        """
        Computes the Evidence Lower Bound, KL and, Reconstruction costs

        Args:
            x: tensor: (batch, dim): Observations

        Returns:
            nelbo: tensor: (): Negative evidence lower bound
            kl: tensor: (): ELBO KL divergence to prior
            rec: tensor: (): ELBO Reconstruction term
        """
        # encode
        qm, qv = self.enc.encode(x)

        # sample z(1) for a Monte Carlo estimate of p(x | z(1))
        z = ut.sample_gaussian(qm, qv)

        # decode
        mu, var = self.dec.decode(z)

        kl = self.kl_elem(z, qm, qv).mean(-1)

        nll, rec_mse, rec_var = ut.nlog_prob_normal(mu=mu,
                                                    y=x,
                                                    var=var,
                                                    fixed_var=self.warmup,
                                                    var_pen=self.var_pen)
        rec, rec_mse, rec_var = nll.mean(-1), rec_mse.mean(-1), rec_var.mean(-1)

        nelbo = kl + rec
        return nelbo, kl, rec, rec_mse, rec_var
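
A training step would typically minimize `nelbo` directly. A hypothetical usage sketch (`model`, `optimizer`, and `loader` are placeholder names, not from the source):

    for x in loader:
        optimizer.zero_grad()
        nelbo, kl, rec, rec_mse, rec_var = model.negative_elbo_bound(x)
        nelbo.backward()  # gradients flow through the reparameterized z
        optimizer.step()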
Example No. 3
    def negative_iwae_bound_for(self, x, x_hat, y, c, iw):
        """
        Computes the Importance Weighted Autoencoder Bound
        Additionally, we also compute the ELBO KL and reconstruction terms

        Args:
            x: tensor: (batch, dim): Observations
            x_hat: tensor: (batch, dim): Observations
            y: tensor: (batch, y_dim): whether observations contain EV
            c: tensor: (batch, c_dim): target mapping specification
            iw: int: (): Number of importance weighted samples

        Returns:
            niwae: tensor: (): Negative IWAE bound
            kl: tensor: (): ELBO KL divergence to prior
            rec: tensor: (): ELBO Reconstruction term
        """
        # encode
        qm, qv = self.enc.encode(x, y=y)

        # replicate qm, qv
        q_shape = list(qm.shape)
        qm = qm.unsqueeze(1).expand(q_shape[0], iw, *q_shape[1:])
        qv = qv.unsqueeze(1).expand(q_shape[0], iw, *q_shape[1:])
        # replicate x_hat, y, c (the iw dimension is kept here, not flattened)
        x_shape = list(x_hat.shape)
        x_hat = x_hat.unsqueeze(1).expand(x_shape[0], iw, *x_shape[1:])
        y_shape = list(y.shape)
        y = y.unsqueeze(1).expand(y_shape[0], iw, *y_shape[1:])
        c_shape = list(c.shape)
        c = c.unsqueeze(1).expand(c_shape[0], iw, *c_shape[1:])

        # sample z(1)...z(iw) for a Monte Carlo estimate of p(x | z)
        z = ut.sample_gaussian(qm, qv)

        kl_elem = self.kl_elem(z, qm, qv)

        # decode
        mu, var = self.dec.decode(z, y=y, c=c)

        nll, rec_mse, rec_var = ut.nlog_prob_normal(
            mu=mu, y=x_hat, var=var, fixed_var=self.warmup, var_pen=self.var_pen)
        log_prob, rec_mse, rec_var = -nll, rec_mse.mean(), rec_var.mean()

        niwae = -ut.log_mean_exp(log_prob - kl_elem, dim=1).mean(-1)

        # reduce
        rec = -log_prob.mean(1).mean(-1)
        kl = kl_elem.mean(1).mean(-1)
        return niwae, kl, rec, rec_mse, rec_var
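
Note that `kl_elem` receives the sample `z` as well as `qm, qv`, which suggests a single-sample Monte Carlo KL estimate, log q(z|x) - log p(z), rather than the closed form; that is exactly the term an IWAE importance weight needs. A plausible sketch, assuming a standard-normal prior (an assumption; the actual prior may differ):

    import math
    import torch

    def log_normal(z, m, v):
        # Log density of a diagonal Gaussian, summed over the last dimension.
        return -0.5 * (torch.log(2 * math.pi * v) + (z - m).pow(2) / v).sum(-1)

    def kl_elem(z, qm, qv):
        # Monte Carlo KL(q || p) at the sampled z with p = N(0, I), so that
        # log_prob - kl_elem equals the IWAE log importance weight log w.
        return log_normal(z, qm, qv) - log_normal(z, torch.zeros_like(z),
                                                  torch.ones_like(z))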
Example No. 4
    def negative_elbo_bound_for(self, x, x_hat, y, c):
        """
        Computes the Evidence Lower Bound, KL, and Reconstruction costs
        for the conditional model.

        Returns:
            nelbo: tensor: (): Negative evidence lower bound
            kl: tensor: (): ELBO KL divergence to prior
            rec: tensor: (): ELBO Reconstruction term
            rec_mse: tensor: (): MSE component of the reconstruction term
            rec_var: tensor: (): variance component of the reconstruction term
        """
        # encode
        qm, qv = self.enc.encode(x, y=y)
        # sample z(1) for a Monte Carlo estimate of p(x | z(1))
        z = ut.sample_gaussian(qm, qv)

        kl = self.kl_elem(z, qm, qv)

        # decode
        mu, var = self.dec.decode(z, y=y, c=c)
        rec, rec_mse, rec_var = ut.nlog_prob_normal(
            mu=mu, y=x_hat, var=var, fixed_var=self.warmup, var_pen=self.var_pen)

        # reduce
        kl = kl.mean(-1)
        rec, rec_mse, rec_var = rec.mean(-1), rec_mse.mean(-1), rec_var.mean(-1)
        nelbo = kl + rec
        return nelbo, kl, rec, rec_mse, rec_var
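
A hypothetical evaluation snippet contrasting the two conditional bounds (`model` and the batch tensors are placeholders; `iw=10` is arbitrary). With iw > 1 the IWAE bound is at least as tight as the ELBO, so in expectation `niwae <= nelbo` up to Monte Carlo noise:

    import torch

    with torch.no_grad():
        nelbo, kl, rec, rec_mse, rec_var = model.negative_elbo_bound_for(
            x, x_hat, y, c)
        niwae, *_ = model.negative_iwae_bound_for(x, x_hat, y, c, iw=10)
    print(f"NELBO: {nelbo.item():.3f}  NIWAE: {niwae.item():.3f}")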
Example No. 5
    def negative_iwae_bound(self, x, iw):
        """
        Computes the Importance Weighted Autoencoder Bound
        Additionally, we also compute the ELBO KL and reconstruction terms

        Args:
            x: tensor: (batch, dim): Observations
            iw: int: (): Number of importance weighted samples

        Returns:
            niwae: tensor: (): Negative IWAE bound
            kl: tensor: (): ELBO KL divergence to prior
            rec: tensor: (): ELBO Reconstruction term
        """
        # encode
        qm, qv = self.enc.encode(x)

        # replicate qm, qv
        q_shape = list(qm.shape)
        qm = qm.unsqueeze(1).expand(q_shape[0], iw, *q_shape[1:])
        qv = qv.unsqueeze(1).expand(q_shape[0], iw, *q_shape[1:])
        # replicate x
        x_shape = list(x.shape)
        x = x.unsqueeze(1).expand(x_shape[0], iw, *x_shape[1:])

        # sample z(1)...z(iw) for a Monte Carlo estimate of p(x | z)
        z = ut.sample_gaussian(qm, qv)

        # decode
        mu, var = self.dec.decode(z)

        kl_elem = self.kl_elem(z, qm, qv)

        nll, rec_mse, rec_var = ut.nlog_prob_normal(mu=mu,
                                                    y=x,
                                                    var=var,
                                                    fixed_var=self.warmup,
                                                    var_pen=self.var_pen)
        log_prob, rec_mse, rec_var = -nll, rec_mse.mean(), rec_var.mean()

        niwae = -ut.log_mean_exp(log_prob - kl_elem, dim=1).mean(-1)
        rec = -log_prob.mean(1).mean(-1)
        kl = kl_elem.mean(1).mean(-1)
        return niwae, kl, rec, rec_mse, rec_var
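
The unsqueeze/expand replication pattern recurs in every IWAE example above; it could be factored into a small helper (a sketch, not part of the original code):

    def replicate(t, iw):
        # (batch, *dims) -> (batch, iw, *dims); expand avoids copying memory.
        return t.unsqueeze(1).expand(t.shape[0], iw, *t.shape[1:])

    # e.g. qm, qv, x = replicate(qm, iw), replicate(qv, iw), replicate(x, iw)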