Example #1
# Architecture parameters
number_hidden_units = 20
b1 = NormalVariable(np.zeros((number_hidden_units, 1)), 10 * np.ones(
    (number_hidden_units, 1)), "b1")
b2 = NormalVariable(np.zeros((number_output_classes, 1)), 10 * np.ones(
    (number_output_classes, 1)), "b2")
weights1 = NormalVariable(np.zeros(
    (number_hidden_units, number_pixels)), 10 * np.ones(
        (number_hidden_units, number_pixels)), "weights1")
weights2 = NormalVariable(
    np.zeros((number_output_classes, number_hidden_units)), 10 * np.ones(
        (number_output_classes, number_hidden_units)), "weights2")

# Forward pass
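# (a single tanh hidden layer; with Normal priors on the weights and biases
# this is a Bayesian neural-network classifier)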
hidden_units = BF.tanh(BF.matmul(weights1, x) + b1)
final_activations = BF.matmul(weights2, hidden_units) + b2
k = CategoricalVariable(softmax_p=final_activations, name="k")

# Probabilistic model
model = ProbabilisticModel([k])

# Observations
k.observe(labels)

# Variational Model
Qb1 = NormalVariable(np.zeros((number_hidden_units, 1)),
                     0.2 * np.ones((number_hidden_units, 1)),
                     "b1",
                     learnable=True)
Qb2 = NormalVariable(np.zeros((number_output_classes, 1)),
                     0.2 * np.ones((number_output_classes, 1)),
                     "b2",
                     learnable=True)
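
The example breaks off here. A minimal sketch of how it might continue,
mirroring Qb1/Qb2 for the weight matrices and reusing the set_posterior_model
and perform_inference calls shown in Examples #7 and #9 (step sizes and
iteration counts are illustrative guesses):

# Hypothetical continuation -- not part of the original example:
from brancher import inference

Qweights1 = NormalVariable(np.zeros((number_hidden_units, number_pixels)),
                           0.2 * np.ones((number_hidden_units, number_pixels)),
                           "weights1",
                           learnable=True)
Qweights2 = NormalVariable(np.zeros((number_output_classes, number_hidden_units)),
                           0.2 * np.ones((number_output_classes, number_hidden_units)),
                           "weights2",
                           learnable=True)
model.set_posterior_model(ProbabilisticModel([Qweights1, Qweights2, Qb1, Qb2]))
inference.perform_inference(model,
                            number_iterations=200,
                            optimizer="Adam",
                            lr=0.01)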
Example #2
x = EmpiricalVariable(input_variable,
                      indices=minibatch_indices,
                      name="x",
                      is_observed=True)
labels = EmpiricalVariable(output_labels,
                           indices=minibatch_indices,
                           name="labels",
                           is_observed=True)

# Architecture parameters
weights = NormalVariable(
    np.zeros((number_output_classes, number_regressors)), 10 * np.ones(
        (number_output_classes, number_regressors)), "weights")

# Forward pass
final_activations = BF.matmul(weights, x)
k = CategoricalVariable(logits=final_activations, name="k")

# Probabilistic model
model = ProbabilisticModel([k])

# Observations
k.observe(labels)

# Variational model
num_particles = 2  #10
initial_locations = [
    np.random.normal(0., 1., (number_output_classes, number_regressors))
    for _ in range(num_particles)
]
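# These locations seed an ensemble of particles for brancher's particle-based
# inference methods (e.g. Wasserstein variational gradient descent).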
particles = [  # truncated in source; a plausible reconstruction:
    ProbabilisticModel([RootVariable(location, name="weights", learnable=True)])
    for location in initial_locations
]
Example #3
        labels = EmpiricalVariable(output_labels,
                                   indices=minibatch_indices,
                                   name="labels",
                                   is_observed=True)

        # Architecture parameters
        weights1 = NormalVariable(
            np.zeros((number_hidden_nodes, number_regressors)), 10 * np.ones(
                (number_hidden_nodes, number_regressors)), "weights1")
        weights2 = NormalVariable(
            np.zeros((number_output_classes, number_hidden_nodes)),
            10 * np.ones(
                (number_output_classes, number_hidden_nodes)), "weights2")

        # Forward pass
        final_activations = BF.matmul(weights2, BF.tanh(BF.matmul(weights1,
                                                                  x)))
        k = CategoricalVariable(softmax_p=final_activations, name="k")

        # Probabilistic model
        model = ProbabilisticModel([k])

        # Observations
        k.observe(labels)

        # Variational model
        num_particles = N
        initial_locations1 = [
            np.random.normal(0., 1., (number_hidden_nodes, number_regressors))
            for _ in range(num_particles)
        ]
        initial_locations2 = [
            np.random.normal(0., 1.,
                             (number_output_classes, number_hidden_nodes))
            for _ in range(num_particles)
        ]
Example #4
imagesGT = []
imagesNoise = []
images1 = []
images2 = []
images3 = []
for rep in range(N_rep):
    h_size = 120
    W1 = DeterministicVariable(np.random.normal(0., 0.2, (h_size, h_size)),
                               "W1",
                               learnable=False)
    W2 = DeterministicVariable(np.random.normal(0., 0.2, (h_size, h_size)),
                               "W2",
                               learnable=False)
    #V = DeterministicVariable(np.random.normal(0., 1., (100, h_size)), "V", learnable=False)

    f = lambda z, W: z + BF.tanh(BF.matmul(W, z))
    F = lambda z, W1, W2: f(f(z, W1), W2)
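    # f is a residual update z -> z + tanh(W z); F chains two such steps to
    # give a smooth nonlinear latent dynamics.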

    measurement_noise = 2.  #1.5
    z = [
        NormalVariable(np.zeros((h_size, 1)),
                       np.ones((h_size, 1)),
                       "z0",
                       learnable=False)
    ]
    img = [
        DeterministicVariable(decoder(BF.reshape(z[0], (h_size, 1, 1))),
                              "img0",
                              learnable=False)
    ]
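    # decoder (defined elsewhere in the source) maps the latent state z into
    # image space.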
    x = [  # truncated in source; plausibly the noisy observations of img:
        NormalVariable(img[0], measurement_noise, "x0", learnable=False)
    ]
Example #5
        epsilon = NormalVariable(np.zeros((hidden_size, 1)),
                                 np.ones((hidden_size, )),
                                 'epsilon',
                                 learnable=True)
        AR_model = ProbabilisticModel(x + y + [epsilon])

        Qepsilon = NormalVariable(np.zeros((hidden_size, 1)),
                                  np.ones((hidden_size, )),
                                  'epsilon',
                                  learnable=True)
        W1 = RootVariable(np.random.normal(0, 0.1, (hidden_size, latent_size)),
                          "W1",
                          learnable=True)
        W2 = RootVariable(np.random.normal(0, 0.1, (3 * T, hidden_size)),
                          "W2",
                          learnable=True)
        pre_x = BF.matmul(W2, BF.sigmoid(BF.matmul(W1, Qepsilon)))
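        # W2 has 3 * T output rows: rows [0, T) parameterize Qx, rows [T, 2T)
        # parameterize Qh, and rows [2T, 3T) parameterize Qz below.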
        Qx = []
        Qh = []
        Qz = []
        for t in range(0, T):
            Qx.append(
                NormalVariable(pre_x[t],
                               driving_noise,
                               x_names[t],
                               learnable=True))
            Qh.append(
                NormalVariable(pre_x[T + t],
                               driving_noise,
                               h_names[t],
                               learnable=True))
            Qz.append(  # truncated in source; reconstructed by symmetry with Qx/Qh:
                NormalVariable(pre_x[2 * T + t],
                               driving_noise,
                               z_names[t],
                               learnable=True))
Example #6
def forward_transform(self, x, dim):
    return BF.matmul(x, BF.transpose(x, -2, -1))
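
A quick NumPy check of what this transform computes: the Gram matrix x @ x.T,
which is always symmetric positive semi-definite (the unused dim argument
presumably just matches the base-class signature):

import numpy as np

x = np.random.randn(4, 3)
gram = x @ x.T  # same computation as BF.matmul(x, BF.transpose(x, -2, -1))
assert np.allclose(gram, gram.T)                   # symmetric
assert np.all(np.linalg.eigvalsh(gram) >= -1e-9)   # non-negative eigenvalues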
Example #7
# Structured NN distribution #
hidden_size = 5
latent_size = 5
out_size = N_groups + N_people
Qepsilon = Normal(np.zeros((latent_size, 1)),
                  np.ones((latent_size, )),
                  'epsilon',
                  learnable=True)
W1 = RootVariable(np.random.normal(0, 0.1, (hidden_size, latent_size)),
                  "W1",
                  learnable=True)
W2 = RootVariable(np.random.normal(0, 0.1, (out_size, hidden_size)),
                  "W2",
                  learnable=True)
pre_x = BF.matmul(W2, BF.sigmoid(BF.matmul(W1, Qepsilon)))
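# A single shared epsilon makes this a structured variational family: rows
# [0, N_groups) of pre_x drive the group means, the remaining rows the
# person means.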

Qgroup_means = [
    Normal(pre_x[n], 4., "group_mean_{}".format(n), learnable=True)
    for n in range(N_groups)
]
Qpeople_means = [
    Normal(pre_x[N_groups + m], 0.1, "person_{}".format(m), learnable=True)
    for m, assignment_list in enumerate(assignment_matrix)
]

model.set_posterior_model(ProbabilisticModel(Qpeople_means + Qgroup_means))

# Inference #
inference.perform_inference(model,
                            number_iterations=N_itr)  # remaining arguments truncated in source
Example #8
minibatch_size = 30
minibatch_indices = RandomIndices(dataset_size=dataset_size,
                                  batch_size=minibatch_size,
                                  name="indices",
                                  is_observed=True)
x = EmpiricalVariable(input_variable,
                      indices=minibatch_indices,
                      name="x",
                      is_observed=True)
labels = EmpiricalVariable(output_labels,
                           indices=minibatch_indices,
                           name="labels",
                           is_observed=True)
weights = NormalVariable(np.zeros((1, number_regressors)), 0.5 * np.ones(
    (1, number_regressors)), "weights")
logit_p = BF.matmul(weights, x)
k = BinomialVariable(1, logit_p=logit_p, name="k")
model = ProbabilisticModel([k])

#samples = model._get_sample(300)
#model.calculate_log_probability(samples)

# Observations
k.observe(labels)

#observed_model = inference.get_observed_model(model)
#observed_samples = observed_model._get_sample(number_samples=1, observed=True)

# Variational Model
Qweights = NormalVariable(np.zeros((1, number_regressors)),
                          np.ones((1, number_regressors)),
                          "weights",
                          learnable=True)
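
Since k is BinomialVariable(1, logit_p=...), the likelihood is Bernoulli and
the model above is Bayesian logistic regression. The forward pass in plain
NumPy terms (illustrative only):

import numpy as np

def predictive_prob(weights, x):
    """p(k = 1 | x) = sigmoid(weights @ x)."""
    return 1.0 / (1.0 + np.exp(-(weights @ x)))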
Example #9
#hidden_size1 = 30
#hidden_size2 = 10
out_size = 10

# Weights
#W1 = Deterministic(np.random.normal(0., 0.1, (hidden_size1, input_size)), "W1", learnable=True)
#W2 = Deterministic(np.random.normal(0., 0.1, (hidden_size2, hidden_size1)), "W2", learnable=True)
#W3 = Deterministic(np.random.normal(0., 0.1, (out_size, hidden_size2)), "W3", learnable=True)
V = Deterministic(np.random.normal(0., 0.1, (out_size, input_size)),
                  "V",
                  learnable=True)

#z1 = Deterministic(BF.relu(BF.matmul(W1, BF.reshape(x, shape=(input_size, 1)))), "z1")
#z2 = Deterministic(BF.relu(BF.matmul(W2, z1)), "z2")
#rho = Deterministic(0.1*BF.matmul(W3, z2), "rho")
rho = Deterministic(BF.matmul(V, x / 255), "rho")
k = Categorical(logits=rho, name="k")

# Observe
k.observe(labels)
model = ProbabilisticModel([k])

# Train
from brancher.inference import MaximumLikelihood
from brancher.inference import perform_inference

perform_inference(model,
                  inference_method=MaximumLikelihood(),
                  number_iterations=150,
                  optimizer="Adam",
                  lr=0.01)
Example #10
def __call__(self, var):
    output = BF.matmul(self.tri_matrix, var)
    log_det = -BF.sum(BF.log(
        BF.abs(self.tri_matrix[:, self.diag_indices[0], self.diag_indices[1]])
        + self.shift),
        axis=1)
    return DeterministicVariable(output,
                                 log_determinant=log_det,
                                 name="L {}".format(var.name))