Example No. 1
def test_readme():
    import torch
    import matplotlib.pyplot as plt
    import bgflow as bg

    # define prior and target
    dim = 2
    prior = bg.NormalDistribution(dim)
    target = bg.DoubleWellEnergy(dim)

    # here we aggregate all layers of the flow
    layers = []
    layers.append(bg.SplitFlow(dim // 2))
    layers.append(
        bg.CouplingFlow(
            # we use an affine transformation to transform the RHS conditioned on the LHS
            bg.AffineTransformer(
                # use simple dense nets for the affine shift/scale
                shift_transformation=bg.DenseNet([dim // 2, 4, dim // 2],
                                                 activation=torch.nn.ReLU()),
                scale_transformation=bg.DenseNet([dim // 2, 4, dim // 2],
                                                 activation=torch.nn.Tanh()))))
    layers.append(bg.InverseFlow(bg.SplitFlow(dim // 2)))

    # now define the flow as a sequence of all operations stored in layers
    flow = bg.SequentialFlow(layers)

    # The BG is defined by a prior, a target, and a flow
    generator = bg.BoltzmannGenerator(prior, flow, target)

    # sample from the BG
    samples = generator.sample(1000)
    plt.hist2d(samples[:, 0].detach().numpy(),
               samples[:, 1].detach().numpy(),
               bins=100)
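
This snippet samples from an untrained generator, so the histogram just shows the pushed-forward prior. A minimal training sketch that could run inside test_readme before generator.sample, reusing the generator.kldiv API exercised in Example No. 5 (the optimizer, learning rate, batch size, and iteration count are assumptions):

    # hedged sketch: minimize the reverse KL divergence (assumed hyperparameters)
    optimizer = torch.optim.Adam(generator.parameters(), lr=1e-3)
    for _ in range(500):
        optimizer.zero_grad()
        # per-sample KL estimates from 256 prior samples
        kll = generator.kldiv(256)
        kll.mean().backward()
        optimizer.step()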
Example No. 2
    def _create_layers(self):
        dim_channel1 = self.dim // 2
        dim_channel2 = self.dim - dim_channel1
        split_into_2 = bg.SplitFlow(dim_channel1, dim_channel2)

        layers = [
            # -- split
            split_into_2,
            # -- transform
            self._coupling_block(dim_channel1, dim_channel2),
            bg.SwapFlow(),
            self._coupling_block(dim_channel2, dim_channel1),
            # -- merge
            bg.InverseFlow(split_into_2)
        ]
        return layers
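
The _coupling_block helper referenced above is not shown in this snippet. A plausible sketch, modeled on the coupling blocks in Examples No. 1 and No. 5; the method body and the self.hidden width attribute are assumptions, not the library's code:

    def _coupling_block(self, dim_in, dim_out):
        # affine-transform one channel conditioned on the other; the
        # shift/scale DenseNets mirror those in the other examples
        # (self.hidden is an assumed attribute holding the hidden width)
        return bg.CouplingFlow(
            bg.AffineTransformer(
                shift_transformation=bg.DenseNet(
                    [dim_in, self.hidden, dim_out],
                    activation=torch.nn.ReLU()),
                scale_transformation=bg.DenseNet(
                    [dim_in, self.hidden, dim_out],
                    activation=torch.nn.Tanh())))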
Example No. 3
# In[14]:

dim_ics = dim_bonds + dim_angles + dim_torsions + dim_cartesian
mean = torch.zeros(dim_ics).to(ctx)
# passing the mean explicitly to create samples on the correct device
prior = bg.NormalDistribution(dim_ics, mean=mean)

# ## Normalizing Flow
#
# Next, we set up the normalizing flow by stacking together different neural networks. For now, we will do this in a rather naive way, not distinguishing between bonds, angles, and torsions. Therefore, we will first define a flow that splits the output from the prior into the different IC terms.
#
# ### Split Layer

# In[15]:

split_into_ics_flow = bg.SplitFlow(dim_bonds, dim_angles, dim_torsions,
                                   dim_cartesian)

# In[16]:

# test: split a prior sample into ICs, then map back through the coordinate
# transform; the last flow output (the log det Jacobian) is dropped
_ics = split_into_ics_flow(prior.sample(3))[:-1]
coordinate_transform.forward(*_ics, inverse=True)[0].shape

# ### Coupling Layers
#
# Next, we will set up so-called RealNVP coupling layers, which split the input into two channels and then learn affine transformations of channel 1 conditioned on channel 2. Here we will do the split naively between the first and second half of the degrees of freedom.

# In[17]:


class RealNVP(bg.SequentialFlow):
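    # the snippet is truncated here; a minimal constructor consistent with
    # the _create_layers helper from Example No. 2 might look like this
    # (the (dim, hidden) signature is an assumption)
    def __init__(self, dim, hidden):
        self.dim = dim
        self.hidden = hidden
        super().__init__(self._create_layers())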
Example No. 4
"""
In this example, a simple Boltzmann Generator is created using coupling layers.
"""

import torch
import matplotlib.pyplot as plt
import bgflow as bg

# define prior and target
dim = 2
prior = bg.NormalDistribution(dim)
target = bg.DoubleWellEnergy(dim)

# here we aggregate all layers of the flow
layers = []
layers.append(bg.SplitFlow(dim // 2))
layers.append(
    bg.CouplingFlow(
        # we use an affine transformation to transform
        # the RHS conditioned on the LHS
        bg.AffineTransformer(
            # use simple dense nets for the affine shift/scale
            shift_transformation=bg.DenseNet([dim // 2, 4, dim // 2],
                                             activation=torch.nn.ReLU()),
            scale_transformation=bg.DenseNet([dim // 2, 4, dim // 2],
                                             activation=torch.nn.Tanh()))))
layers.append(bg.InverseFlow(bg.SplitFlow(dim // 2)))

# now define the flow as a sequence of all operations stored in layers
flow = bg.SequentialFlow(layers)
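
The snippet ends here; the script plausibly continues as in Example No. 1 by wrapping prior, flow, and target in a Boltzmann Generator and sampling from it:

# the BG is defined by a prior, a target, and a flow (as in Example No. 1)
generator = bg.BoltzmannGenerator(prior, flow, target)

# sample from the BG
samples = generator.sample(1000)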
Example No. 5
def test_bg_basic(device, dtype):
    import torch
    import bgflow as bg

    dim = 4
    mean = torch.zeros(dim, dtype=dtype, device=device)
    prior = bg.NormalDistribution(dim, mean)
    # RealNVP
    flow = bg.SequentialFlow([
        bg.SplitFlow(dim // 2),
        bg.CouplingFlow(
            bg.AffineTransformer(bg.DenseNet([dim // 2, dim, dim // 2]),
                                 bg.DenseNet([dim // 2, dim, dim // 2]))),
        bg.SwapFlow(),
        bg.CouplingFlow(
            bg.AffineTransformer(bg.DenseNet([dim // 2, dim, dim // 2]),
                                 bg.DenseNet([dim // 2, dim, dim // 2]))),
        bg.SwapFlow(),
        bg.MergeFlow(dim // 2)
    ]).to(mean)
    target = bg.NormalDistribution(dim, mean)

    generator = bg.BoltzmannGenerator(prior, flow, target)

    # set parameters to 0 -> flow = id
    for p in generator.parameters():
        p.data.zero_()
    z = prior.sample(10)
    x, dlogp = flow.forward(z)
    assert torch.allclose(z, x)
    assert torch.allclose(dlogp, torch.zeros_like(dlogp))

    # Test losses
    generator.zero_grad()
    kll = generator.kldiv(100000)
    kll.mean().backward()
    # gradients should be small, as the network is already optimal
    for p in generator.parameters():
        assert torch.allclose(p.grad,
                              torch.zeros_like(p.grad),
                              rtol=0.0,
                              atol=5e-2)

    generator.zero_grad()
    samples = target.sample(100000)
    nll = generator.energy(samples)
    nll.mean().backward()
    # gradients should be small, as the network is already optimal
    for p in generator.parameters():
        assert torch.allclose(p.grad,
                              torch.zeros_like(p.grad),
                              rtol=0.0,
                              atol=5e-2)

    # just testing the API for the following:
    generator.log_weights(samples)
    z, dlogp = flow.forward(samples, inverse=True)
    generator.log_weights_given_latent(samples, z, dlogp)
    generator.sample(10000)
    generator.force(z)

    # test trainers
    trainer = bg.KLTrainer(generator)
    trainer.train(100, samples)