Example #1
0
    def normal_flow(self, x, y_onehot):
        """Run the flow forward and compute the objective in bits per dimension.

        Args:
            x: input image batch of shape (b, c, h, w).
            y_onehot: one-hot class labels consumed by the conditional prior.

        Returns:
            z: latent tensor produced by the flow.
            bpd: negative log-likelihood converted to bits per dimension.
            y_logits: class logits when ``self.y_condition`` is set, else None.
        """
        b, c, h, w = x.shape

        # Dequantization: adds uniform binning noise plus the matching logdet.
        x, logdet = uniform_binning_correction(x)

        z, objective = self.flow(x, logdet=logdet, reverse=False)  # z_size = C

        mean, logs = self.prior(
            x, y_onehot)  # if condition, mean_size = C//2; else, mean_size = C

        if self.y_condition:
            # Split the latent: z_y is scored under the learned conditional
            # prior, z_n under a fixed Gaussian.
            z_y, z_n = split_feature(z, "split")
            # Allocate the fixed-prior parameters on the same device/dtype as
            # z_y instead of hard-coding .cuda(), so CPU runs also work.
            self.mean_normal = torch.zeros_like(z_y)
            # NOTE(review): logs = 1 means std = e, not a standard normal
            # (which would be logs = 0) — confirm this is intentional.
            self.logs_normal = torch.ones_like(z_y)
            y_logits = self.project_class(z_y.mean(2).mean(2))
            objective += gaussian_likelihood(
                mean, logs, z_y) + gaussian_likelihood(self.mean_normal,
                                                       self.logs_normal, z_n)
        else:
            objective += gaussian_likelihood(mean, logs, z)
            y_logits = None

        # Full objective - converted to bits per dimension
        bpd = (-objective) / (math.log(2.) * c * h * w)

        return z, bpd, y_logits
    def normal_flow(self, x, y_onehot):
        """Run the flow forward and compute the objective in bits per dimension.

        Args:
            x: input image batch of shape (b, c, h, w).
            y_onehot: one-hot class labels consumed by the prior.

        Returns:
            z: latent tensor produced by the flow.
            bpd: negative log-likelihood converted to bits per dimension.
            y_logits: class logits when ``self.y_condition`` is set, else None.
            logpz: log-likelihood of z under the prior.
        """
        b, c, h, w = x.shape

        # Dequantization: adds uniform binning noise plus the matching logdet.
        x, logdet = uniform_binning_correction(x)

        z, objective = self.flow(x, logdet=logdet, reverse=False)

        mean, logs = self.prior(x, y_onehot)
        # Evaluate the prior log-likelihood once and reuse it — the original
        # called gaussian_likelihood twice with identical arguments.
        logpz = gaussian_likelihood(mean, logs, z)
        objective += logpz

        if self.y_condition:
            y_logits = self.project_class(z.mean(2).mean(2))
        else:
            y_logits = None

        # Full objective - converted to bits per dimension
        bpd = (-objective) / (math.log(2.) * c * h * w)

        return z, bpd, y_logits, logpz
Example #3
0
    def normal_flow(self, x, zn, y_onehot):
        """Run the flow forward (deterministically) and report bits per dim.

        Uniform binning dequantization is deliberately skipped here — the
        original author removed it because it injects random noise — so the
        pass starts from a zero log-determinant.

        Args:
            x: input image batch of shape (b, c, h, w).
            zn: auxiliary latent threaded through the flow.
            y_onehot: one-hot class labels consumed by the prior.

        Returns:
            z: latent tensor produced by the flow.
            zn: updated auxiliary latent.
            bpd: negative log-likelihood in bits per dimension.
            y_logits: class logits when ``self.y_condition`` is set, else None.
        """
        _, n_channels, height, width = x.shape

        # No uniform_binning_correction(x): kept deterministic on purpose.
        logdet = 0.0

        z, objective, zn = self.flow(x, zn, logdet=logdet, reverse=False)

        mean, logs = self.prior(x, y_onehot)
        objective = objective + gaussian_likelihood(mean, logs, z)

        y_logits = (self.project_class(z.mean(2).mean(2))
                    if self.y_condition else None)

        # Convert the negative log-likelihood into bits per dimension.
        denom = math.log(2.) * n_channels * height * width
        bpd = -objective / denom

        return z, zn, bpd, y_logits
Example #4
0
    def normal_flow(self, x, y_onehot, correction):
        """Run the flow forward with optional uniform-binning dequantization.

        Args:
            x: input image batch of shape (b, c, h, w).
            y_onehot: one-hot class labels consumed by the prior.
            correction: when truthy, dequantize x with uniform binning noise;
                otherwise start from a zero logdet on x's device.

        Returns:
            z: latent tensor produced by the flow.
            bpd: negative log-likelihood in bits per dimension.
            y_logits: class logits when ``self.y_condition`` is set, else None.
            (prior, logdet): the two log-likelihood components of the objective.
        """
        _, n_channels, height, width = x.shape

        if correction:
            x, logdet = uniform_binning_correction(x)
        else:
            logdet = torch.zeros(len(x)).to(x.device)

        z, logdet = self.flow(x, logdet=logdet, reverse=False)

        mean, logs = self.prior(x, y_onehot)
        prior = gaussian_likelihood(mean, logs, z)

        y_logits = (self.project_class(z.mean(2).mean(2))
                    if self.y_condition else None)

        # Negative log-likelihood expressed in bits per dimension.
        denom = math.log(2.) * n_channels * height * width
        bpd = -(prior + logdet) / denom

        return z, bpd, y_logits, (prior, logdet)