# Importance-sampling (variational) distributions: one ProbabilisticModel per
# particle, each holding a pair of learnable Normal proposals centred at the
# particle's initial weight locations.
# NOTE(review): this first line appears to have lost its leading indentation in
# the paste — the list body below is indented as class/method-interior code.
variational_samplers = [
            ProbabilisticModel([
                NormalVariable(loc=loc1,
                               scale=0.1,
                               name="weights1",
                               learnable=True),
                NormalVariable(loc=loc2,
                               scale=0.1,
                               name="weights2",
                               learnable=True)
            ]) for loc1, loc2 in zip(initial_locations1, initial_locations2)
        ]

        # Inference
        # WVGD: presumably Wasserstein Variational Gradient Descent — TODO
        # confirm against the inference library's documentation.  The samplers
        # built above propose updates for the particle models.
        inference_method = WVGD(variational_samplers=variational_samplers,
                                particles=particles,
                                biased=False)
        # Runs the optimisation loop; posterior_model=particles makes the
        # particle models the object being fitted.
        inference.perform_inference(model,
                                    inference_method=inference_method,
                                    number_iterations=1000,
                                    number_samples=100,
                                    optimizer="Adam",
                                    lr=0.0025,
                                    posterior_model=particles,
                                    pretraining_iterations=0)
        # Training diagnostics recorded by perform_inference under this key.
        loss_list = model.diagnostics["loss curve"]

        # Test accuracy
        # Held-out set is everything in `ind` after the training slice.
        # Why three images per held-out index is not visible here — verify
        # against the evaluation code below.
        test_size = len(ind[dataset_size:])
        num_images = test_size * 3
        test_indices = RandomIndices(dataset_size=test_size,
# Second particle (particle_1 / initial_location_2 are defined above, outside
# this excerpt): a deterministic, learnable point in weight space.
particle_2 = DeterministicVariable(initial_location_2, name="weights", learnable=True)
particle_locations = [particle_1, particle_2]
# Each particle location is wrapped in its own single-variable model.
particles = [ProbabilisticModel([l]) for l in particle_locations]

# Importance sampling distributions: one truncated-Normal proposal per
# particle, restricted to that particle's Voronoi cell.
voranoi_set = VoronoiSet(particle_locations) #TODO: Bug if you use variables instead of probabilistic models
variational_samplers = [
    ProbabilisticModel([
        TruncatedNormalVariable(mu=mean,
                                sigma=0.1,
                                # bind the cell index as a default argument so
                                # each lambda keeps its own index
                                truncation_rule=lambda a, cell=idx: voranoi_set(a, cell),
                                name="weights",
                                learnable=True)
    ])
    for idx, mean in enumerate((initial_location_1, initial_location_2))
]

# Inference
# biased=True selects the biased WVGD variant — see the library docs for the
# exact estimator semantics (not visible from this excerpt).
inference.perform_inference(model,
                            inference_method=WVGD(biased=True),
                            number_iterations=1000,
                            number_samples=50,
                            optimizer=chainer.optimizers.Adam(0.005),
                            posterior_model=particles,
                            sampler_model=variational_samplers,
                            pretraining_iterations=0)
# Loss curve recorded by perform_inference.
loss_list = model.diagnostics["loss curve"]

# Local variational models
# (header kept from the original; the code below just plots the loss curve)
plt.plot(loss_list)
plt.show()

# Test accuracy
# Evaluation sizes; `test` is the held-out dataset defined outside this excerpt.
num_images = 2000
test_size = len(test)
# Example 3
# example values: initial_locations = [0, 0.1]
# One deterministic (root) particle per initial location; all particles share
# the variable name "theta".
particles = [ProbabilisticModel([RootVariable(init, name="theta", learnable=True)])
             for init in initial_locations]

# Importance sampling distributions
# A learnable Normal proposal centred at each particle's starting point.
variational_samplers = [ProbabilisticModel([NormalVariable(loc=init,
                                                           scale=0.2,
                                                           name="theta",
                                                           learnable=True)])
                        for init in initial_locations]

# Inference
# number_post_samples: presumably the number of samples drawn from the fitted
# posterior after training — TODO confirm against the WVGD implementation.
inference_method = WVGD(variational_samplers=variational_samplers,
                        particles=particles,
                        biased=False,
                        number_post_samples=80000)
inference.perform_inference(model,
                            inference_method=inference_method,
                            number_iterations=1500,
                            number_samples=50,
                            optimizer="Adam",
                            lr=0.005,
                            posterior_model=particles,
                            pretraining_iterations=0)
# Loss curve recorded by perform_inference.
loss_list = model.diagnostics["loss curve"]

# Local variational models
plt.plot(loss_list)
plt.show()
                RootVariable(b, name="b", learnable=True)
            ]) for wk, wl, b in zip(wk_locations, wl_locations, b_locations)
        ]

        # Importance sampling distributions: one learnable Normal proposal per
        # particle, centred at that particle's current weight locations.
        # `0.1 + 0 * x` presumably yields a constant 0.1 scale with the same
        # shape as x — TODO confirm NormalVariable's broadcasting semantics.
        variational_samplers = [
            ProbabilisticModel([
                NormalVariable(wk_loc, 0.1 + 0 * wk_loc, name="Wk", learnable=True),
                NormalVariable(wl_loc, 0.1 + 0 * wl_loc, name="Wl", learnable=True),
                NormalVariable(b_loc, 0.1 + 0 * b_loc, name="b", learnable=True),
            ])
            for wk_loc, wl_loc, b_loc in zip(wk_locations, wl_locations, b_locations)
        ]

        # Inference
        # biased=True selects the biased WVGD variant; number_post_samples is
        # presumably the post-training posterior sample count — TODO confirm.
        inference_method = WVGD(variational_samplers=variational_samplers,
                                particles=particles,
                                number_post_samples=500,
                                biased=True)
        inference.perform_inference(
            model,
            inference_method=inference_method,
            number_iterations=1500,  #4000
            number_samples=20,  #10
            optimizer="Adam",
            lr=0.05,
            posterior_model=particles,
            pretraining_iterations=0)
        # Loss curve recorded by perform_inference; plotted for inspection.
        loss_list = model.diagnostics["loss curve"]
        plt.plot(loss_list)

        # ELBO
        ELBO = model.posterior_model.estimate_log_model_evidence(