Example #1
##
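# Build a small dependency graph: a, b, c are deterministic leaf variables (a is marked
# learnable) and d is a Normal variable whose mean (a*b + c) and scale (c + a**2) are
# themselves functions of those leaves.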
# Assumed imports (module paths follow the Brancher examples; adjust to your installation):
import numpy as np
import torch

import brancher.functions as BF
from brancher.standard_variables import DeterministicVariable, NormalVariable

a = DeterministicVariable(data=1.5, name='a', learnable=True)
b = DeterministicVariable(0.3, 'b')
c = DeterministicVariable(0.3, 'c')
d = NormalVariable((a * b + c), c + a**2, 'd')

##
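# Drawing 10 samples from the deterministic variable a should simply repeat its value (1.5).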
print(a._get_sample(10))

##
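# BF.cat concatenates variables along the given dimension; the concatenations then
# parameterise a small chain of Normal variables, with a ReLU applied to f before it
# feeds the mean of g.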
e1 = BF.cat((a, b), 2)  # TODO: to be changed later so that the user does not have to specify dim explicitly (adjust cat)
e2 = BF.cat((a, c), 2)
f = NormalVariable(e1**2, e2**1, 'f')
g = NormalVariable(BF.relu(f), 1., 'g')

##
print(g._get_sample(10))

##
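# Concrete tensor values to bind to the symbolic variables below: a = b = pi/4 and c = 2.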
a_val = torch.tensor(0.25 * np.pi * np.ones((1, 1), dtype="float32"))
b_val = torch.tensor(0.25 * np.pi * np.ones((1, 1), dtype="float32"))
c_val = torch.tensor(2 * np.ones((1, 1), dtype="float32"))

##
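# Evaluate the symbolic expression at the bound values: sin(pi/4 + pi/4) / 2 = sin(pi/2) / 2 = 0.5.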
z = BF.sin(a + b) / c

print(z.fn({a: a_val, b: b_val, c: c_val}))
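
# A follow-on sketch, not part of the original snippet: the variables above can be
# collected into a ProbabilisticModel and sampled jointly (import path and method name
# follow the Brancher examples; they may differ between versions).
from brancher.variables import ProbabilisticModel

model = ProbabilisticModel([d, g])
print(model.get_sample(2))  # joint samples of g, f, d and their ancestors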

##
Example #2
    # NOTE: the head of this example is cut off in the source (decoder1, decoder2 and the
    # architecture classes are defined earlier in the original file).
    decoder3 = BF.BrancherFunction(
        DecoderArchitecture3(latent_size3=latent_size3, image_size=image_size))
    decoderLabel = BF.BrancherFunction(
        DecoderArchitectureLabel(latent_size2=latent_size2,
                                 num_classes=num_classes))

    # Generative model
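    # Three-level hierarchy z1 -> z2 -> z3 -> x: each level is a Normal variable whose mean
    # comes from the previous level's decoder output, the labels branch off z2 through
    # decoderLabel, and x is a pixel-wise Bernoulli (Binomial with total_count=1).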
    z1sd = 1.5  # 1
    z2sd = 0.25  # 0.25
    z3sd = 0.15
    z1 = NormalVariable(np.zeros((latent_size1, )),
                        z1sd * np.ones((latent_size1, )),
                        name="z1")
    decoder_output1 = DeterministicVariable(decoder1(z1),
                                            name="decoder_output1")
    z2 = NormalVariable(BF.relu(decoder_output1["mean"]),
                        z2sd * np.ones((latent_size2, )),
                        name="z2")
    label_logits = DeterministicVariable(decoderLabel(z2), "label_logits")
    labels = CategoricalVariable(logits=label_logits, name="labels")
    decoder_output2 = DeterministicVariable(decoder2(z2),
                                            name="decoder_output2")
    z3 = NormalVariable(BF.relu(decoder_output2["mean"]),
                        z3sd * np.ones((latent_size3, )),
                        name="z3")
    decoder_output3 = DeterministicVariable(decoder3(z3),
                                            name="decoder_output3")
    x = BinomialVariable(total_count=1,
                         logits=decoder_output3["mean"],
                         name="x")
    model = ProbabilisticModel([x, z1, z2, z3, labels])
                                   name="x",
                                   is_observed=True)
        labels_test = EmpiricalVariable(output_labels_test,
                                        indices=minibatch_indices,
                                        name="labels",
                                        is_observed=True)

        # Forward pass
        in_channels = 1
        out_channels = 10
        image_size = 28
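        # Bayesian convolutional classifier: a wide Normal prior (scale 10) on a 2x2 conv
        # kernel, ReLU, spatial mean pooling over dims (2, 3), then a Normal linear
        # readout producing the class logits.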
        Wk = NormalVariable(loc=np.zeros((out_channels, in_channels, 2, 2)),
                            scale=10 * np.ones(
                                (out_channels, in_channels, 2, 2)),
                            name="Wk")
        z = DeterministicVariable(BF.mean(BF.relu(BF.conv2d(x, Wk, stride=1)),
                                          (2, 3)),
                                  name="z")
        Wl = NormalVariable(loc=np.zeros((num_classes, out_channels)),
                            scale=10 * np.ones((num_classes, out_channels)),
                            name="Wl")
        b = NormalVariable(loc=np.zeros((num_classes, 1)),
                           scale=10 * np.ones((num_classes, 1)),
                           name="b")
        reshaped_z = BF.reshape(z, shape=(out_channels, 1))
        k = CategoricalVariable(logits=BF.linear(reshaped_z, Wl, b), name="k")

        # Probabilistic model
        model = ProbabilisticModel([k])

        # Observations (the remainder of this snippet is cut off in the source)
    # NOTE: the source splices in another snippet at this point.
    # Initialize encoder and decoders
    encoder1 = BF.BrancherFunction(EncoderArchitecture1(image_size=image_size, latent_size3=latent_size3))
    encoder2 = BF.BrancherFunction(EncoderArchitecture2(latent_size2=latent_size2, latent_size3=latent_size3))
    encoder3 = BF.BrancherFunction(EncoderArchitecture3(latent_size1=latent_size1, latent_size2=latent_size2))

    decoder1 = BF.BrancherFunction(DecoderArchitecture1(latent_size1=latent_size1, latent_size2=latent_size2))
    decoder2 = BF.BrancherFunction(DecoderArchitecture2(latent_size2=latent_size2, latent_size3=latent_size3))
    decoder3 = BF.BrancherFunction(DecoderArchitecture3(latent_size3=latent_size3, image_size=image_size))

    # Generative model
    z1sd = 1.5 #1
    z2sd = 0.25 #0.25
    z3sd = 0.15
    z1 = NormalVariable(np.zeros((latent_size1,)), z1sd*np.ones((latent_size1,)), name="z1")
    decoder_output1 = DeterministicVariable(decoder1(z1), name="decoder_output1")
    z2 = NormalVariable(BF.relu(decoder_output1["mean"]), z2sd*np.ones((latent_size2,)), name="z2")
    decoder_output2 = DeterministicVariable(decoder2(z2), name="decoder_output2")
    z3 = NormalVariable(BF.relu(decoder_output2["mean"]), z3sd*np.ones((latent_size3,)), name="z3")
    decoder_output3 = DeterministicVariable(decoder3(z3), name="decoder_output3")
    x = BinomialVariable(total_count=1, logits=decoder_output3["mean"], name="x")
    model = ProbabilisticModel([x, z1, z2, z3])

    # Amortized variational distribution
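    # The encoders invert the generative hierarchy (x -> z3 -> z2 -> z1); each Q-variable
    # is a Normal whose mean and sd come from an encoder output, and it reuses the name of
    # the corresponding generative variable, presumably so the two models can later be
    # paired by name.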
    b_size = 200
    Qx = EmpiricalVariable(dataset, batch_size=b_size, name="x", is_observed=True)
    encoder_output1 = DeterministicVariable(encoder1(Qx), name="encoder_output1")
    Qz3 = NormalVariable(encoder_output1["mean"], encoder_output1["sd"], name="z3")
    encoder_output2 = DeterministicVariable(encoder2(encoder_output1["mean"]), name="encoder_output2")
    Qz2 = NormalVariable(encoder_output2["mean"], encoder_output2["sd"], name="z2")
    encoder_output3 = DeterministicVariable(encoder3(encoder_output2["mean"]), name="encoder_output3")
    Qz1 = NormalVariable(encoder_output3["mean"], encoder_output3["sd"], name="z1")
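
    # A hedged sketch, not in the source: the variational variables are typically wrapped
    # into their own ProbabilisticModel and attached to the generative model before running
    # stochastic variational inference (API names follow the Brancher examples and may
    # differ between versions).
    variational_model = ProbabilisticModel([Qz1, Qz2, Qz3])
    model.set_posterior_model(variational_model)
    # Training would then proceed via brancher.inference.perform_inference(model, ...).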
Example #5
                           name="labels",
                           is_observed=True)

# Forward pass
in_channels = 1
out_channels1 = 10
out_channels2 = 20
image_size = 28
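# Bayesian CNN: two stride-2 convolutions with standard-Normal priors on their 3x3 kernels,
# spatial mean pooling over dims (2, 3), then a Normal linear layer mapping the
# out_channels2 features to num_classes logits.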
Wk1 = NormalVariable(loc=np.zeros((out_channels1, in_channels, 3, 3)),
                     scale=np.ones((out_channels1, in_channels, 3, 3)),
                     name="Wk1")
Wk2 = NormalVariable(loc=np.zeros((out_channels2, out_channels1, 3, 3)),
                     scale=np.ones((out_channels2, out_channels1, 3, 3)),
                     name="Wk2")
z = DeterministicVariable(
    BF.mean(BF.conv2d(BF.relu(BF.conv2d(x, Wk1, stride=2, padding=0)),
                      Wk2, stride=2, padding=0),
            (2, 3)),
    name="z")
Wl = NormalVariable(loc=np.zeros((num_classes, out_channels2)),
                    scale=np.ones((num_classes, out_channels2)),
                    name="Wl")
b = NormalVariable(loc=np.zeros((num_classes, 1)),
                   scale=np.ones((num_classes, 1)),
                   name="b")
reshaped_z = BF.reshape(z, shape=(out_channels2, 1))
k = CategoricalVariable(logits=BF.linear(reshaped_z, Wl, b), name="k")

# Probabilistic model
model = ProbabilisticModel([k])
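
# A hedged sketch, not in the source: before inference the class variable is typically
# conditioned on the observed labels (the observe() method follows the Brancher examples;
# exact usage may differ between versions).
k.observe(labels)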