Example 1
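    # Test fixture: a GenerativeModel wraps a prior and a batch simulator; one forward
    # pass through the InvariantNetwork builds its layers before the optimizer is created.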
    @classmethod
    def setUpClass(cls):
        cls.generative_model = GenerativeModel(
            ex.priors.dm_prior, ex.simulators.dm_batch_simulator)
        cls.network = InvariantNetwork()
        params, sim_data = cls.generative_model(64, 128)
        cls.network(sim_data)  # one prediction to initialize layers
        cls.optimizer = tf.keras.optimizers.Adam()
Example 2
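    # Without a generative model, the MetaTrainer still trains offline on pre-simulated
    # data, while online, round-based, and simulate-and-train modes should raise
    # OperationNotSupportedError.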
    def test_no_generative_model(self):
        D = 6
        J = 6
        bf_meta = build_meta_dict({'n_params': D, 'n_models': J}, DEFAULT_SETTING_INVARIANT_BAYES_FLOW)

        amortizer = ex.amortizers.InvariantBayesFlow(bf_meta)
        trainer = MetaTrainer(amortizer,
                              loss=kl_latent_space,
                              learning_rate=.0003
                              )

        generative_model = GenerativeModel(
            model_prior,
            [TPrior(D // 2, 1.0, 5.0)] * J,
            [MultivariateTSimulator(df) for df in np.arange(1, J + 1, 1)]
        )
        model_indices, params, sim_data = generative_model(64, 128)
        _losses = trainer.train_offline(2, 16, model_indices, params, sim_data)

        with self.assertRaises(OperationNotSupportedError):
            _losses = trainer.train_online(epochs=2, iterations_per_epoch=20, batch_size=32, n_obs=110)

        with self.assertRaises(OperationNotSupportedError):
            _losses = trainer.train_rounds(epochs=1, rounds=5, sim_per_round=100, batch_size=32, n_obs=110)

        with self.assertRaises(OperationNotSupportedError):
            _losses = trainer.simulate_and_train_offline(n_sim=100, epochs=2, batch_size=16, n_obs=110)
Example 3
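    # A ModelComparisonTrainer built with n_models only (no generative model) trains
    # offline on pre-simulated data (a second call passes raw integer model indices);
    # online and round-based training should raise OperationNotSupportedError.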
    def test_no_generative_model(self):
        summary_net = SequenceNet()

        evidential_meta = {
            'n_models': 3,
            'out_activation': 'softplus',
            'n_dense': 3,
            'dense_args': {'kernel_initializer': 'glorot_uniform', 'activation': 'relu', 'units': 128}
        }
        evidential_net = EvidentialNetwork(evidential_meta)
        amortizer = MultiModelAmortizer(evidential_net, summary_net)
        trainer = ModelComparisonTrainer(amortizer, n_models=3)

        generative_model = GenerativeModel(
            model_prior,
            [ex.priors.model1_params_prior, ex.priors.model2_params_prior, ex.priors.model3_params_prior],
            [ex.simulators.forward_model1, ex.simulators.forward_model2, ex.simulators.forward_model3]
        )
        model_indices, params, sim_data = generative_model(64, 128)
        _losses = trainer.train_offline(2, 16, model_indices, sim_data)
        _losses = trainer.train_offline(2, 16, np.random.randint(0, 3, (64,)), sim_data)  # expect message
        with self.assertRaises(OperationNotSupportedError):
            _losses = trainer.train_online(epochs=2, iterations_per_epoch=20, batch_size=32, n_obs=100)

        with self.assertRaises(OperationNotSupportedError):
            _losses = trainer.train_rounds(epochs=2, rounds=2, sim_per_round=50, batch_size=32, n_obs=110)
Example 4
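    # Test fixture: a SingleModelAmortizer and a GenerativeModel wired into a
    # ParameterEstimationTrainer, shared by the tests via cls.trainer.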
    @classmethod
    def setUpClass(cls):
        summary_net = InvariantNetwork()
        inference_net = InvertibleNetwork({'n_params': 5})
        amortizer = SingleModelAmortizer(inference_net, summary_net)
        generative_model = GenerativeModel(ex.priors.dm_prior, ex.simulators.dm_batch_simulator)
        trainer = ParameterEstimationTrainer(amortizer, generative_model)
        cls.trainer = trainer
Example 5
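    # Test fixture: a model-comparison setup with a model prior plus one parameter
    # prior and one simulator per candidate model, shared via cls.trainer.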
    @classmethod
    def setUpClass(cls):
        summary_net = SequenceNet()

        evidential_meta = {
            'n_models': 3,
            'out_activation': 'softplus',
            'n_dense': 3,
            'dense_args': {
                'kernel_initializer': 'glorot_uniform',
                'activation': 'relu',
                'units': 128
            }
        }
        evidential_net = EvidentialNetwork(evidential_meta)
        amortizer = MultiModelAmortizer(evidential_net, summary_net)
        generative_model = GenerativeModel(ex.priors.model_prior, [
            ex.priors.model1_params_prior, ex.priors.model2_params_prior,
            ex.priors.model3_params_prior
        ], [
            ex.simulators.forward_model1, ex.simulators.forward_model2,
            ex.simulators.forward_model3
        ])

        trainer = ModelComparisonTrainer(amortizer, generative_model)
        cls.trainer = trainer
Example 6
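    # A meta GenerativeModel: one model prior plus M parameter priors and M simulators,
    # returning model indices alongside parameters and simulated data.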
    def test_meta_generative_model(self):
        M = 10
        D = 100
        prior = TPrior(D // 2, mu_scale=1.0, scale_scale=5.0)
        priors = [prior] * M
        simulators = [
            MultivariateTSimulator(df) for df in np.arange(1, 101, M)
        ]
        generative_model = GenerativeModel(model_prior, priors, simulators)
        _model_indices, _params, _sim_data = generative_model(
            n_sim=N_SIM, n_obs=N_OBS)
Example 7
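    # param_transform converts the sampled (means, cov) tuple into a single numpy
    # array of means and diagonal variances.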
    def test_structural_param_transform_tuple_to_numpy(self):
        D = 5

        def param_transform_mvn(theta):
            means, cov = theta
            var = np.diagonal(cov, axis1=1, axis2=2)
            return np.concatenate([means, var], axis=1)

        prior = GaussianMeanCovPrior(D=D, a0=10, b0=1, m0=0, beta0=1)
        simulator = GaussianMeanCovSimulator()
        generative_model = GenerativeModel(prior,
                                           simulator,
                                           param_transform=param_transform_mvn)
Example 8
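    # Test fixture: a MetaTrainer combined with a meta GenerativeModel (model prior,
    # per-model parameter priors, per-model simulators), shared via cls.trainer.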
    @classmethod
    def setUpClass(cls):
        D = 10
        J = 10
        bf_meta = build_meta_dict({'n_params': D, 'n_models': J}, DEFAULT_SETTING_INVARIANT_BAYES_FLOW)

        amortizer = ex.amortizers.InvariantBayesFlow(bf_meta)
        generative_model = GenerativeModel(
            model_prior,
            [TPrior(D // 2, 1.0, 5.0)] * J,
            [MultivariateTSimulator(df) for df in np.arange(1, J + 1, 1)]
        )

        trainer = MetaTrainer(amortizer,
                              generative_model,
                              loss=kl_latent_space,
                              learning_rate=.0003
                              )
        cls.trainer = trainer
Example 9
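    # ParameterEstimationTrainer without a generative model: offline training on
    # externally simulated data works, while online, experience-replay, round-based,
    # and simulate-and-train-offline modes should raise OperationNotSupportedError.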
    def test_no_generative_model(self):
        summary_net = InvariantNetwork()
        inference_net = InvertibleNetwork({'n_params': 5})
        amortizer = SingleModelAmortizer(inference_net, summary_net)
        generative_model = GenerativeModel(ex.priors.dm_prior, ex.simulators.dm_batch_simulator)
        trainer = ParameterEstimationTrainer(amortizer)

        params, sim_data = generative_model(64, 128)
        _losses = trainer.train_offline(2, 16, params, sim_data)

        with self.assertRaises(OperationNotSupportedError):
            _losses = trainer.train_online(epochs=2, iterations_per_epoch=20, batch_size=32, n_obs=100)

        with self.assertRaises(OperationNotSupportedError):
            _losses = trainer.train_experience_replay(epochs=2, batch_size=32, iterations_per_epoch=20,
                                                      capacity=100, n_obs=np.random.randint(106, 301))

        with self.assertRaises(OperationNotSupportedError):
            _losses = trainer.train_rounds(epochs=2, rounds=2, sim_per_round=20, batch_size=32, n_obs=150)

        with self.assertRaises(OperationNotSupportedError):
            _losses = trainer.simulate_and_train_offline(n_sim=100, epochs=2, batch_size=32, n_obs=100)
Example 10
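    # Test fixture: a GenerativeModel plus summary (InvariantNetwork) and inference
    # (InvertibleNetwork) networks and an Adam optimizer, created once per test class.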
    @classmethod
    def setUpClass(cls):
        cls.generative_model = GenerativeModel(
            ex.priors.dm_prior, ex.simulators.dm_batch_simulator)
        cls.invariant_network = InvariantNetwork()
        cls.network = InvertibleNetwork({'n_params': 5})
        cls.optimizer = tf.keras.optimizers.Adam()
Example 11
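    # Simplest usage: a GenerativeModel built from a single prior and simulator,
    # called with n_sim and n_obs to produce parameters and simulated data.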
    def test_simple_generative_model(self):
        generative_model = GenerativeModel(
            ex.priors.dm_prior, ex.simulators.dm_batch_simulator)
        _params, _sim_data = generative_model(n_sim=N_SIM, n_obs=N_OBS)