def __init__(self, v, mat_dim, shift=0.001, upper=False, learnable=False):
     """Cache a variable and its triangular-matrix form.

     Args:
         v: source variable; passed to ``BF.triangular_form`` (project type,
            exact requirements not visible here).
         mat_dim: side length of the square matrix; used only to precompute
            ``np.diag_indices(mat_dim)``.
         shift: small offset stored for later use (0.001 default);
            presumably a diagonal jitter for numerical stability — TODO confirm.
         upper: if True, transpose the triangular matrix so the factor is
            upper- instead of lower-triangular.
         learnable: stored flag; not acted on in this constructor.
     """
     self.v = v
     tri_matrix = BF.triangular_form(v)
     if upper:
         # NOTE(review): transpose uses dims (2, 1) here while other code in
         # this file uses (-2, -1) — the TODO below flags this inconsistency.
         tri_matrix = BF.transpose(tri_matrix, 2, 1) #TODO: Dangerous, uses inconsistent indexing
     self.tri_matrix = tri_matrix
     # Precomputed (row_idx, col_idx) arrays addressing the main diagonal.
     self.diag_indices = np.diag_indices(mat_dim)
     self.shift = shift
     self.upper = upper
     self.learnable = learnable
# Example #2 (scrape artifact — source-listing separator, not code)
 def forward_transform(self, x, dim):
     """Return the Gram form ``x @ x^T`` of *x*.

     ``dim`` is accepted for interface compatibility but is not used by
     this transform. The transpose swaps the last two dimensions, so *x*
     is presumably at least 2-D — TODO confirm with callers.
     """
     x_transposed = BF.transpose(x, -2, -1)
     return BF.matmul(x, x_transposed)
# Example #3 (scrape artifact — source-listing separator, not code)
                            optimizer=optimizer,
                            lr=lr)
# Collect training diagnostics from the fitted model.
loss_list3 = model.diagnostics["loss curve"]
# Monte-Carlo estimate of the ELBO with 1000 samples.
ELBO3 = model.estimate_log_model_evidence(1000)

# Structured Gaussian distribution (low rank) #
# Covariance is parameterized as cov_shift + F F^T with F of shape
# (out_size, rank) — a rank-1 perturbation of a fixed diagonal here.
rank = 1
cov_factor = RootVariable(np.random.normal(0, 0.1, (out_size, rank)),
                          "cov_factor")
# Fixed diagonal offset keeps the covariance positive definite.
cov_shift = RootVariable(0.01 * np.identity(out_size),
                         "cov_shift",
                         learnable=False)
mean_shift = RootVariable(np.zeros((out_size, )), "mean_shift", learnable=True)
# NOTE(review): BF.transpose(..., 2, 1) — other code in this file uses
# (-2, -1); verify both address the same (last two) axes for a 2-D factor.
QV = MultivariateNormal(loc=mean_shift,
                        covariance_matrix=cov_shift +
                        BF.matmul(cov_factor, BF.transpose(cov_factor, 2, 1)),
                        name="V",
                        learnable=True)

# One Normal posterior factor per group, centered on components of QV.
Qgroup_means = [
    Normal(QV[n], 4., "group_mean_{}".format(n), learnable=True)
    for n in range(N_groups)
]
# One Normal posterior factor per person; person m reads component
# N_groups + m of QV. assignment_list itself is unused — the loop only
# needs the row count of assignment_matrix.
Qpeople_means = [
    Normal(QV[N_groups + m], 0.1, "person_{}".format(m), learnable=True)
    for m, assignment_list in enumerate(assignment_matrix)
]

model.set_posterior_model(ProbabilisticModel(Qpeople_means + Qgroup_means))

# Inference #