Example 1
    def __init__(self, num_flows, actnorm, affine, scale_fn_str, hidden_units,
                 activation, range_flow, augment_size, base_dist, cond_size):

        D = 2  # Number of data dimensions
        A = D + augment_size  # Number of augmented data dimensions
        P = 2 if affine else 1  # Number of elementwise parameters

        # Initialize the context network; only upsample the context in
        # context_init if the latent shape does not change during the flow.
        context_init = MLP(input_size=cond_size,
                           output_size=D,
                           hidden_units=hidden_units,
                           activation=activation)

        # initialize flow with either augmentation or Abs surjection
        if augment_size > 0:
            assert augment_size % 2 == 0
            transforms = [Augment(StandardNormal((augment_size, )), x_size=D)]

        else:
            transforms = [SimpleAbsSurjection()]
            if range_flow == 'logit':
                transforms += [
                    ScaleBijection(scale=torch.tensor([[1 / 4, 1 / 4]])),
                    Logit()
                ]
            elif range_flow == 'softplus':
                transforms += [SoftplusInverse()]

        # apply coupling layer flows
        for _ in range(num_flows):
            net = nn.Sequential(
                MLP(A // 2 + D,
                    P * A // 2,
                    hidden_units=hidden_units,
                    activation=activation), ElementwiseParams(P))
            if affine:
                transforms.append(
                    ConditionalAffineCouplingBijection(
                        net, scale_fn=scale_fn(scale_fn_str)))
            else:
                transforms.append(ConditionalAdditiveCouplingBijection(net))
            if actnorm: transforms.append(ActNormBijection(A))  # act-norm over all A flow dims (A == D when augment_size == 0)
            transforms.append(Reverse(A))

        transforms.pop()

        if base_dist == "uniform":
            base = StandardUniform((A, ))
        else:
            base = StandardNormal((A, ))

        super(SRFlow, self).__init__(base_dist=base,
                                     transforms=transforms,
                                     context_init=context_init)
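
This class builds a conditional flow, so density evaluation and sampling both take a context tensor. A minimal usage sketch, assuming the class subclasses survae's ConditionalFlow (log_prob(x, context) / sample(context)); all constructor values below are hypothetical placeholders, not values from the example:

model = SRFlow(num_flows=4, actnorm=True, affine=True, scale_fn_str='exp',
               hidden_units=[64, 64], activation='relu', range_flow='logit',
               augment_size=2, base_dist='normal', cond_size=8)

x = torch.randn(128, 2)        # 2-D data points
context = torch.randn(128, 8)  # conditioning input (cond_size=8)
loss = -model.log_prob(x, context=context).mean()  # negative log-likelihood
samples = model.sample(context=context)            # one sample per context row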
Example 2
    def test_data_dep_init(self):
        batch_size = 50
        shape = (32,)
        x = torch.randn(batch_size, *shape)
        bijection = ActNormBijection(shape[0])
        self.assertEqual(bijection.initialized, torch.zeros(1))
        z, ldj = bijection(x)  # first forward pass triggers data-dependent init
        self.assertEqual(bijection.initialized, torch.ones(1))

        self.eps = 1e-5  # tolerance for the tensor comparisons below
        self.assertEqual(z.mean(0), torch.zeros(z.shape[1]))  # standardized: zero mean
        self.assertEqual(z.std(0), torch.ones(z.shape[1]))  # standardized: unit std
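
The data-dependent initialization verified above can be written out directly: on the first batch, ActNorm chooses its shift and log-scale from the batch statistics so that the output is standardized. A minimal sketch of that computation (the library's exact implementation may differ, e.g. in epsilon handling):

import torch

x = torch.randn(50, 32)                    # a batch, as in the test above
shift = x.mean(0, keepdim=True)            # per-dimension batch mean
log_scale = x.std(0, keepdim=True).log()   # log of per-dimension batch std
z = (x - shift) * torch.exp(-log_scale)    # z.mean(0) ~ 0, z.std(0) ~ 1
ldj = -log_scale.sum().expand(x.shape[0])  # log|det J| is identical for every example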
Example 3
    def test_bijection_is_well_behaved(self):
        batch_size = 50

        self.eps = 1e-6  # tolerance for tensor comparisons in the checks below
        for shape in [(32,),
                      (32,8),
                      (32,8,8)]:
            with self.subTest(shape=shape):
                x = torch.randn(batch_size, *shape)
                if len(shape) == 1:   bijection = ActNormBijection(shape[0])
                elif len(shape) == 2: bijection = ActNormBijection1d(shape[0])
                elif len(shape) == 3: bijection = ActNormBijection2d(shape[0])
                self.assert_bijection_is_well_behaved(bijection, x, z_shape=(batch_size, *shape))
Example 4
D = 2  # Number of data dimensions
A = D + args.augdim  # Number of augmented data dimensions
P = 2 if args.affine else 1  # Number of elementwise parameters

transforms = [Augment(StandardNormal((args.augdim, )), x_size=D)]
for _ in range(args.num_flows):
    net = nn.Sequential(
        MLP(A // 2,
            P * A // 2,
            hidden_units=args.hidden_units,
            activation=args.activation), ElementwiseParams(P))
    if args.affine:
        transforms.append(
            AffineCouplingBijection(net, scale_fn=scale_fn(args.scale_fn)))
    else:
        transforms.append(AdditiveCouplingBijection(net))
    if args.actnorm: transforms.append(ActNormBijection(A))  # act-norm over all A flow dims
    transforms.append(Reverse(A))
transforms.pop()

model = Flow(base_dist=StandardNormal((A, )),
             transforms=transforms).to(args.device)

#######################
## Specify optimizer ##
#######################

if args.optimizer == 'adam':
    optimizer = Adam(model.parameters(), lr=args.lr)
elif args.optimizer == 'adamax':
    optimizer = Adamax(model.parameters(), lr=args.lr)
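
With the model and optimizer in place, training minimizes the negative log-likelihood. A minimal loop sketch; train_loader and args.epochs are assumptions, not part of the snippet:

model.train()
for epoch in range(args.epochs):   # args.epochs is assumed
    for x in train_loader:         # assumed DataLoader over the training set
        x = x.to(args.device)
        optimizer.zero_grad()
        loss = -model.log_prob(x).mean()  # NLL in nats
        loss.backward()
        optimizer.step()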
Example 5
test_loader = DataLoader(test, batch_size=128, shuffle=True)

###########
## Model ##
###########


def net():
    return nn.Sequential(nn.Linear(1, 200), nn.ReLU(), nn.Linear(200, 100),
                         nn.ReLU(), nn.Linear(100, 2), ElementwiseParams(2))


model = Flow(base_dist=StandardNormal((2, )),
             transforms=[
                 AffineCouplingBijection(net()),
                 ActNormBijection(2),
                 Reverse(2),
                 AffineCouplingBijection(net()),
                 ActNormBijection(2),
                 Reverse(2),
                 AffineCouplingBijection(net()),
                 ActNormBijection(2),
                 Reverse(2),
                 AffineCouplingBijection(net()),
                 ActNormBijection(2),
             ])

###########
## Optim ##
###########
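
The snippet breaks off after the Optim header; a plausible continuation, following the pattern of Example 4 (the optimizer choice and learning rate are assumptions):

optimizer = Adam(model.parameters(), lr=1e-3)  # assumed: torch.optim.Adam, arbitrary lr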
Example 6
    def __init__(self, num_flows, actnorm, affine, scale_fn_str, hidden_units,
                 activation, range_flow, augment_size, base_dist):

        D = 2  # Number of data dimensions

        if base_dist == "uniform":
            classifier = MLP(D,
                             D // 2,
                             hidden_units=hidden_units,
                             activation=activation,
                             out_lambda=lambda x: x.view(-1))
            transforms = [
                ElementAbsSurjection(classifier=classifier),
                ShiftBijection(shift=torch.tensor([[0.0, 4.0]])),
                ScaleBijection(scale=torch.tensor([[1 / 4, 1 / 8]]))
            ]
            base = StandardUniform((D, ))

        else:
            A = D + augment_size  # Number of augmented data dimensions
            P = 2 if affine else 1  # Number of elementwise parameters

            # initialize flow with either augmentation or Abs surjection
            if augment_size > 0:
                assert augment_size % 2 == 0
                transforms = [
                    Augment(StandardNormal((augment_size, )), x_size=D)
                ]

            else:
                transforms = [SimpleAbsSurjection()]
                if range_flow == 'logit':
                    transforms += [
                        ScaleBijection(scale=torch.tensor([[1 / 4, 1 / 4]])),
                        Logit()
                    ]
                elif range_flow == 'softplus':
                    transforms += [SoftplusInverse()]

            # apply coupling layer flows
            for _ in range(num_flows):
                net = nn.Sequential(
                    MLP(A // 2,
                        P * A // 2,
                        hidden_units=hidden_units,
                        activation=activation), ElementwiseParams(P))

                if affine:
                    transforms.append(
                        AffineCouplingBijection(
                            net, scale_fn=scale_fn(scale_fn_str)))
                else:
                    transforms.append(AdditiveCouplingBijection(net))
                if actnorm: transforms.append(ActNormBijection(A))  # act-norm over all A flow dims (A == D when augment_size == 0)
                transforms.append(Reverse(A))

            transforms.pop()
            base = StandardNormal((A, ))

        super(UnconditionalFlow, self).__init__(base_dist=base,
                                                transforms=transforms)
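
Once constructed, the model supports the usual flow interface. A brief usage sketch, assuming the class subclasses survae's Flow; the argument values are hypothetical, and with augment_size > 0 the reported log_prob is a stochastic lower bound rather than an exact likelihood:

model = UnconditionalFlow(num_flows=4, actnorm=True, affine=True,
                          scale_fn_str='exp', hidden_units=[64, 64],
                          activation='relu', range_flow='logit',
                          augment_size=2, base_dist='normal')
x = torch.randn(128, 2)
nll = -model.log_prob(x).mean()  # training objective
samples = model.sample(128)      # draw 128 samples from the model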