Example 1
 def construct_biases(self, learnable, ranges, kwargs):
     for parameter_name, value in kwargs.items():
         if isinstance(value,
                       (Variable, PartialLink, np.ndarray, numbers.Number)):
             if isinstance(value, np.ndarray):
                 dim = value.shape[0]
             elif isinstance(value, numbers.Number):
                 dim = 1
             else:
                 dim = []
             bias = RootVariable(0.,
                                 self.name + "_" + parameter_name + "_" +
                                 "bias",
                                 learnable,
                                 is_observed=self._observed)
             mixing = RootVariable(5.,
                                   self.name + "_" + parameter_name + "_" +
                                   "mixing",
                                   learnable,
                                   is_observed=self._observed)
             kwargs.update({
                 parameter_name:
                 ranges[parameter_name].forward_transform(
                     BF.sigmoid(mixing) * value +
                     (1 - BF.sigmoid(mixing)) * bias, dim)
             })
Example 2
 def __call__(self, var):
     # Planar flow: f(x) = x + u * sigmoid(w . x + b)
     dot_output = BF.dot(self.w, var, reduce=False) + self.b
     output = var + self.u*BF.sigmoid(dot_output)
     # Derivative of the sigmoid, needed for the Jacobian term
     d_sigmoid = lambda x: BF.sigmoid(x)*(1. - BF.sigmoid(x))
     psy = d_sigmoid(dot_output)*self.w
     # Negative log |det J| = -log|1 + u . psi|; self.shift guards against log(0)
     log_det = -BF.log(BF.abs(1. + BF.dot(self.u, psy)) + self.shift)
     return DeterministicVariable(output,
                                  log_determinant=log_det,
                                  name="PlanarFlow {}".format(var.name))
Example 3
    def construct_biases(self, learnable, ranges, kwargs):
        """
        Method. Constructs a bias variable for each variable in the input. Bias variables are deterministic variables
        that transform the input variables.

        Args:
            learnable: Bool. Set true if the biases should be learnable.

            ranges: Dictionary(str: brancher.GeometricRange). Dictionary of variable names and the ranges that apply on
            those variables.

            kwargs: Named variables list that define the input variables of this variable. For each variable in here a
            bias variable will be created.

        Returns:
            None.
        """
        for parameter_name, value in kwargs.items():
            if isinstance(value,
                          (Variable, PartialLink, np.ndarray, numbers.Number)):
                if isinstance(value, np.ndarray):
                    dim = value.shape[0]
                elif isinstance(value, numbers.Number):
                    dim = 1
                else:
                    dim = []
                bias = RootVariable(0.,
                                    self.name + "_" + parameter_name + "_" +
                                    "bias",
                                    learnable,
                                    is_observed=self._observed)
                mixing = RootVariable(5.,
                                      self.name + "_" + parameter_name + "_" +
                                      "mixing",
                                      learnable,
                                      is_observed=self._observed)
                kwargs.update({
                    parameter_name:
                    ranges[parameter_name].forward_transform(
                        BF.sigmoid(mixing) * value +
                        (1 - BF.sigmoid(mixing)) * bias, dim)
                })
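`construct_biases` builds a convex mixture for each input parameter: `sigmoid(mixing) * value + (1 - sigmoid(mixing)) * bias`, passed through the range's `forward_transform`. With the initial value `mixing = 5.`, the output starts close to the original input but can be pulled toward the learnable bias during training. A standalone NumPy sketch of just the mixing behaviour (plain floats standing in for the brancher variables):

    import numpy as np

    def sigmoid(x):
        return 1.0 / (1.0 + np.exp(-x))

    value, bias = 2.0, 0.0           # an input parameter and its learnable bias
    for mixing in [5.0, 0.0, -5.0]:  # 5. is the initial value used above
        mixed = sigmoid(mixing) * value + (1 - sigmoid(mixing)) * bias
        print(mixing, round(mixed, 4))
    # 5.0  -> 1.9866 (output follows the input value)
    # 0.0  -> 1.0    (equal mixture)
    # -5.0 -> 0.0134 (output collapses onto the bias)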
Example 4
            Qh_mean.append(
                RootVariable(0, h_names[t] + "_mean", learnable=True))
            Qhlambda.append(
                RootVariable(1., h_names[t] + "_lambda", learnable=True))

            Qz_mean.append(
                RootVariable(0, z_names[t] + "_mean", learnable=True))
            Qzlambda.append(
                RootVariable(1., z_names[t] + "_lambda", learnable=True))

            # Discretized Lorenz dynamics: dx = s(h - x), dh = x(r - z) - h, dz = xh - bz
            new_x = Qx[t - 1] + dt * s * (Qh[t - 1] - Qx[t - 1])
            new_h = Qh[t - 1] + dt * (Qx[t - 1] * (r - Qz[t - 1]) - Qh[t - 1])
            new_z = Qz[t - 1] + dt * (Qx[t - 1] * Qh[t - 1] - b * Qz[t - 1])

            Qx.append(
                NormalVariable(BF.sigmoid(Qxlambda[t]) * new_x +
                               (1 - BF.sigmoid(Qxlambda[t])) * Qx_mean[t],
                               2 * driving_noise,
                               x_names[t],
                               learnable=True))

            Qh.append(
                NormalVariable(BF.sigmoid(Qhlambda[t]) * new_h +
                               (1 - BF.sigmoid(Qhlambda[t])) * Qh_mean[t],
                               2 * driving_noise,
                               h_names[t],
                               learnable=True))

            Qz.append(
                NormalVariable(BF.sigmoid(Qzlambda[t]) * new_z +
                               (1 - BF.sigmoid(Qzlambda[t])) * Qz_mean[t],
                               2 * driving_noise,
                               z_names[t],
                               learnable=True))
Example 5
    imagesGT.append([
        np.reshape(samples[img[t]].detach().numpy(),
                   (3, image_size, image_size)) for t in range(T)
    ])
    imagesNoise.append([
        np.reshape(samples[x[t]].detach().numpy(), (3, image_size, image_size))
        for t in range(T)
    ])

    # Observe model
    for xt in x:
        xt.observe(samples[xt].detach().numpy()[0, :, :, :, :])

    #### 1 ASDI ####

    # Predictive-coding operator: convex mixture of the forward prediction x
    # and a learnable mean alpha, gated by lambda through a sigmoid
    PCoperator = lambda x, alpha, l: BF.sigmoid(l)*x + (1 - BF.sigmoid(l))*alpha

    Qz = [
        NormalVariable(np.zeros((h_size, 1)),
                       np.ones((h_size, 1)),
                       "z0",
                       learnable=True)
    ]
    Qalpha = []
    Qlambda = []
    for t in range(1, T):
        #Qlambda.append(DeterministicVariable(-1*np.ones((h_size, 1)), "lambda{}".format(t), learnable=True))
        Qlambda.append(
            DeterministicVariable(-1., "lambda{}".format(t), learnable=True))
        Qalpha.append(
Example 6
 def forward_transform(self, x, dim):
     # Map the real line into the open interval (lower_bound, upper_bound)
     return self.lower_bound + (self.upper_bound - self.lower_bound)*BF.sigmoid(x)
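`forward_transform` squashes the whole real line into (lower_bound, upper_bound); this is how the `ranges` passed to `construct_biases` keep transformed parameters inside a valid interval. A quick NumPy sketch with hypothetical bounds:

    import numpy as np

    def sigmoid(x):
        return 1.0 / (1.0 + np.exp(-x))

    lower_bound, upper_bound = 0.0, 10.0    # hypothetical range
    for x in [-100.0, 0.0, 100.0]:
        y = lower_bound + (upper_bound - lower_bound) * sigmoid(x)
        print(x, y)    # stays strictly inside (0, 10): ~0.0, 5.0, ~10.0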
Example 7
        Qx = [NormalVariable(0., 1., 'x0', learnable=True),
              NormalVariable(0., 1., 'x1', learnable=True)]
        Qx_mean = [RootVariable(0., 'x0_mean', learnable=True),
                   RootVariable(0., 'x1_mean', learnable=True)]
        Qlambda = [RootVariable(-0.5, 'x0_lambda', learnable=True),
                   RootVariable(-0.5, 'x1_lambda', learnable=True)]


        for t in range(2, T):
            l = 1.  # same value whether or not t is in the observed range y_range
            Qx_mean.append(RootVariable(0, x_names[t] + "_mean", learnable=True))
            Qlambda.append(RootVariable(l, x_names[t] + "_lambda", learnable=True))
            # Second-order discretization of a damped harmonic oscillator
            new_mu = (-1 - omega ** 2 * dt ** 2 + b * dt) * Qx[t - 2] + (2 - b * dt) * Qx[t - 1]
            Qx.append(NormalVariable(BF.sigmoid(Qlambda[t])*new_mu + (1 - BF.sigmoid(Qlambda[t]))*Qx_mean[t],
                                     np.sqrt(dt) * driving_noise, x_names[t], learnable=True))
        variational_posterior = ProbabilisticModel(Qx)
        AR_model.set_posterior_model(variational_posterior)

        # Inference #
        inference.perform_inference(AR_model,
                                    number_iterations=N_itr,
                                    number_samples=N_smpl,
                                    optimizer=optimizer,
                                    lr=lr)

        loss_list1 = AR_model.diagnostics["loss curve"]

        # ELBO
        ELBO1.append(float(AR_model.estimate_log_model_evidence(N_ELBO_smpl).detach().numpy()))
Example 8
# Structured NN distribution #
hidden_size = 5
latent_size = 5
out_size = N_groups + N_people
# Standard-normal latent noise; sd shaped like the mean to avoid broadcasting issues
Qepsilon = Normal(np.zeros((latent_size, 1)),
                  np.ones((latent_size, 1)),
                  'epsilon',
                  learnable=True)
W1 = RootVariable(np.random.normal(0, 0.1, (hidden_size, latent_size)),
                  "W1",
                  learnable=True)
W2 = RootVariable(np.random.normal(0, 0.1, (out_size, hidden_size)),
                  "W2",
                  learnable=True)
pre_x = BF.matmul(W2, BF.sigmoid(BF.matmul(W1, Qepsilon)))
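Every entry of `pre_x` depends on the same latent draw `Qepsilon`, so this one-hidden-layer network induces correlations between all group and person means that a fully factorized posterior could not represent. A NumPy-only sketch of the same computation (shapes assumed from the definitions above, with example counts):

    import numpy as np

    rng = np.random.default_rng(0)
    hidden_size, latent_size = 5, 5
    out_size = 3 + 4                        # e.g. N_groups = 3, N_people = 4

    W1 = rng.normal(0, 0.1, (hidden_size, latent_size))
    W2 = rng.normal(0, 0.1, (out_size, hidden_size))
    epsilon = rng.standard_normal((latent_size, 1))  # one shared noise draw

    pre_x = W2 @ (1.0 / (1.0 + np.exp(-(W1 @ epsilon))))
    print(pre_x.shape)                      # (7, 1): one location per mean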

Qgroup_means = [
    Normal(pre_x[n], 4., "group_mean_{}".format(n), learnable=True)
    for n in range(N_groups)
]
Qpeople_means = [
    Normal(pre_x[N_groups + m], 0.1, "person_{}".format(m), learnable=True)
    for m, assignment_list in enumerate(assignment_matrix)
]

model.set_posterior_model(ProbabilisticModel(Qpeople_means + Qgroup_means))

# Inference #
inference.perform_inference(model,
                            number_iterations=N_itr,
Example 9
    Qx = EmpiricalVariable(dataset,
                           batch_size=b_size,
                           name="x",
                           is_observed=True)

    encoder_output1 = DeterministicVariable(encoder1(Qx),
                                            name="encoder_output1")
    encoder_output2 = DeterministicVariable(encoder2(encoder_output1["mean"]),
                                            name="encoder_output2")
    encoder_output3 = DeterministicVariable(encoder3(encoder_output2["mean"]),
                                            name="encoder_output3")

    Qlambda11 = RootVariable(l1 * np.ones((latent_size1, )),
                             'lambda11',
                             learnable=True)
    Qlambda12 = RootVariable(l1 * np.ones((latent_size1, )),
                             'lambda12',
                             learnable=True)
    Qz1 = NormalVariable((1 - BF.sigmoid(Qlambda11)) * encoder_output3["mean"],
                         BF.sigmoid(Qlambda12) * z2sd +
                         (1 - BF.sigmoid(Qlambda12)) * encoder_output3["sd"],
                         name="z1")

    Qdecoder_output1 = DeterministicVariable(decoder1(Qz1),
                                             name="Qdecoder_output1")

    Qlambda21 = RootVariable(l0 * np.ones((latent_size2, )),
                             'lambda21',
                             learnable=True)
    Qlambda22 = RootVariable(l0 * np.ones((latent_size2, )),
                             'lambda22',
                             learnable=True)
    Qz2 = NormalVariable(
        BF.sigmoid(Qlambda21) * BF.relu(Qdecoder_output1["mean"]) +
        (1 - BF.sigmoid(Qlambda21)) * encoder_output2["mean"],
        BF.sigmoid(Qlambda22) * z2sd +
        (1 - BF.sigmoid(Qlambda22)) * encoder_output2["sd"],
        name="z2")
Example 10
    model = ProbabilisticModel([x, z1, z2, z3])

    #
    # Amortized variational distribution
    l0 = 1
    l1 = -1

    Qx = EmpiricalVariable(dataset, batch_size=b_size, name="x", is_observed=True)
    encoder_output1 = DeterministicVariable(encoder1(Qx), name="encoder_output1")
    encoder_output2 = DeterministicVariable(encoder2(encoder_output1["mean"]), name="encoder_output2")
    encoder_output3 = DeterministicVariable(encoder3(encoder_output2["mean"]), name="encoder_output3")


    Qlambda11 = RootVariable(l1*np.ones((latent_size1,)), 'lambda11', learnable=True)
    Qlambda12 = RootVariable(l1*np.ones((latent_size1,)), 'lambda12', learnable=True)
    Qz1 = NormalVariable((1 - BF.sigmoid(Qlambda11))*encoder_output3["mean"],
                         BF.sigmoid(Qlambda12) * z2sd + (1 - BF.sigmoid(Qlambda12)) * encoder_output3["sd"], name="z1")

    Qdecoder_output1 = DeterministicVariable(decoder1(Qz1), name="Qdecoder_output1")

    Qlambda21 = RootVariable(l0*np.ones((latent_size2,)), 'lambda21', learnable=True)
    Qlambda22 = RootVariable(l0*np.ones((latent_size2,)), 'lambda22', learnable=True)
    Qz2 = NormalVariable(BF.sigmoid(Qlambda21)*BF.relu(Qdecoder_output1["mean"]) + (1 - BF.sigmoid(Qlambda21))*encoder_output2["mean"],
                         BF.sigmoid(Qlambda22) * z2sd + (1 - BF.sigmoid(Qlambda22)) * encoder_output2["sd"], name="z2")

    Qdecoder_output2 = DeterministicVariable(decoder2(Qz2), name="Qdecoder_output2")

    Qlambda31 = RootVariable(l0*np.ones((latent_size3,)), 'lambda31', learnable=True)
    Qlambda32 = RootVariable(l0*np.ones((latent_size3,)), 'lambda32', learnable=True)
    Qz3 = NormalVariable(BF.sigmoid(Qlambda31)*BF.relu(Qdecoder_output2["mean"]) + (1 - BF.sigmoid(Qlambda31))*encoder_output1["mean"],
                         BF.sigmoid(Qlambda32) * z3sd + (1 - BF.sigmoid(Qlambda32)) * encoder_output1["sd"], name="z3")
Example 11
 def __call__(self, var):
     # Element-wise sigmoid flow. Since sigmoid'(x) = s*(1 - s) with s = sigmoid(x),
     # the negative log |det J| is -log(s) - log(1 - s)
     return DeterministicVariable(BF.sigmoid(var),
                                  log_determinant=-BF.log(BF.sigmoid(var)) - BF.log(1 - BF.sigmoid(var)),
                                  name="Sigmoid({})".format(var.name))
Example 12
        # Structured variational distribution #
        Qx = [NormalVariable(0., 1., 'x0', learnable=True)]
        Qx_mean = [RootVariable(0., 'x0_mean', learnable=True)]
        Qlambda = [RootVariable(0., 'x0_lambda', learnable=True)]

        for t in range(1, T):
            l = 0.  # same value whether or not t is in the observed range y_range
            Qx_mean.append(
                RootVariable(0, x_names[t] + "_mean", learnable=True))
            Qlambda.append(
                RootVariable(l, x_names[t] + "_lambda", learnable=True))
            Qx.append(
                NormalVariable(BF.sigmoid(Qlambda[t]) * Qx[t - 1] +
                               (1 - BF.sigmoid(Qlambda[t])) * Qx_mean[t],
                               driving_noise,
                               x_names[t],
                               learnable=True))
        variational_posterior = ProbabilisticModel(Qx)
        AR_model.set_posterior_model(variational_posterior)

        # Inference #
        inference.perform_inference(AR_model,
                                    number_iterations=N_itr,
                                    number_samples=N_smpl,
                                    optimizer=optimizer,
                                    lr=lr)

        loss_list1 = AR_model.diagnostics["loss curve"]
Example 13
    RootVariable(0., 'x0_lambda', learnable=True),
    RootVariable(0., 'x1_lambda', learnable=True)
]

for t in range(2, T):
    l = 0.  # same value whether or not t is in the observed range y_range
    Qx_mean.append(RootVariable(0, x_names[t] + "_mean", learnable=True))
    Qlambda.append(RootVariable(l, x_names[t] + "_lambda", learnable=True))
    # Second-order discretization of a damped pendulum with learnable frequency Qomega
    new_mu = ((-1 + b * dt) * Qx[t - 2] -
              Qomega**2 * dt**2 * BF.sin(Qx[t - 2]) +
              (2 - b * dt) * Qx[t - 1])
    #new_mu = (-1 - Qomega ** 2 * dt ** 2 + b * dt) * Qx[t - 2] + (2 - b * dt) * Qx[t - 1]
    Qx.append(
        NormalVariable(BF.sigmoid(Qlambda[t]) * new_mu +
                       (1 - BF.sigmoid(Qlambda[t])) * Qx_mean[t],
                       driving_noise,
                       x_names[t],
                       learnable=True))
variational_posterior = ProbabilisticModel(Qx)
AR_model.set_posterior_model(variational_posterior)

# Inference #
inference.perform_inference(AR_model,
                            number_iterations=N_itr,
                            number_samples=N_smpl,
                            optimizer=optimizer,
                            lr=lr)

loss_list1 = AR_model.diagnostics["loss curve"]
#plt.show()

# Structured variational distribution #
Qx = [NormalVariable(0., 1., 'x0', learnable=True)]
Qx_mean = [RootVariable(0., 'x0_mean', learnable=True)]
Qlambda = [RootVariable(-1., 'x0_lambda', learnable=True)]

for t in range(1, T):
    if t in y_range:
        l = 0.
    else:
        l = 1.
    Qx_mean.append(RootVariable(0, x_names[t] + "_mean", learnable=True))
    Qlambda.append(RootVariable(l, x_names[t] + "_lambda", learnable=True))
    Qx.append(
        NormalVariable(BF.sigmoid(Qlambda[t]) * Qx[t - 1] +
                       (1 - BF.sigmoid(Qlambda[t])) * Qx_mean[t],
                       np.sqrt(dt) * driving_noise,
                       x_names[t],
                       learnable=True))
variational_posterior = ProbabilisticModel(Qx)
AR_model.set_posterior_model(variational_posterior)

# Inference #
inference.perform_inference(AR_model,
                            number_iterations=N_itr,
                            number_samples=N_smpl,
                            optimizer=optimizer,
                            lr=lr)

loss_list1 = AR_model.diagnostics["loss curve"]