def test_no_generative_model(self):
    """A trainer constructed without a generative model can still train
    offline on externally simulated data, but every mode that needs to
    simulate on the fly must raise OperationNotSupportedError."""
    n_params, n_models = 6, 6
    bf_meta = build_meta_dict(
        {'n_params': n_params, 'n_models': n_models},
        DEFAULT_SETTING_INVARIANT_BAYES_FLOW,
    )
    amortizer = ex.amortizers.InvariantBayesFlow(bf_meta)
    # Deliberately omit the generative model here.
    trainer = MetaTrainer(amortizer, loss=kl_latent_space, learning_rate=.0003)

    # Simulate a data set externally so offline training is still possible.
    external_model = GenerativeModel(
        model_prior,
        [TPrior(n_params // 2, 1.0, 5.0)] * n_models,
        [MultivariateTSimulator(df) for df in np.arange(1, n_models + 1, 1)],
    )
    model_indices, params, sim_data = external_model(64, 128)
    _losses = trainer.train_offline(2, 16, model_indices, params, sim_data)

    # Without a generative model, all simulation-driven modes must fail.
    with self.assertRaises(OperationNotSupportedError):
        _losses = trainer.train_online(
            epochs=2, iterations_per_epoch=20, batch_size=32, n_obs=110)
    with self.assertRaises(OperationNotSupportedError):
        _losses = trainer.train_rounds(
            epochs=1, rounds=5, sim_per_round=100, batch_size=32, n_obs=110)
    with self.assertRaises(OperationNotSupportedError):
        _losses = trainer.simulate_and_train_offline(
            n_sim=100, epochs=2, batch_size=16, n_obs=110)
def init_same_param_shapes(cls):
    """Build a MetaGenerativeModel whose candidate models all share one
    prior object (and therefore identical parameter shapes).

    Returns
    -------
    MetaGenerativeModel
        Meta model with 10 candidate simulators of increasing df.
    """
    n_models, dim = 10, 8
    shared_prior = TPrior(dim // 2, mu_scale=1.0, scale_scale=5.0)
    # Degrees of freedom 1, 11, 21, ..., 91 — one simulator per model.
    simulators = [MultivariateTSimulator(df)
                  for df in np.arange(1, 101, n_models)]
    return MetaGenerativeModel(model_prior,
                               [shared_prior] * n_models,
                               simulators)
def test_meta_generative_model(self):
    """Smoke test: a model-comparison generative model with a shared
    high-dimensional prior produces indices, parameters, and data.

    NOTE(review): despite the test name this instantiates `GenerativeModel`,
    not `MetaGenerativeModel` — presumably the former dispatches to the meta
    variant when given multiple priors/simulators; confirm against the API.
    """
    n_models, dim = 10, 100
    shared_prior = TPrior(dim // 2, mu_scale=1.0, scale_scale=5.0)
    simulators = [MultivariateTSimulator(df)
                  for df in np.arange(1, 101, n_models)]
    meta_model = GenerativeModel(model_prior,
                                 [shared_prior] * n_models,
                                 simulators)
    # Only checks that sampling runs end-to-end without raising.
    _model_indices, _params, _sim_data = meta_model(n_sim=N_SIM, n_obs=N_OBS)
def test_same_data_transform(self):
    """A single data transform applied to every model must be accepted
    by the MetaGenerativeModel constructor."""

    def add_jitter(x):
        # Tiny additive noise, shape-preserving.
        noise = 0.001 * np.random.random(x.shape)
        return x + noise

    n_models, dim = 10, 8
    shared_prior = TPrior(dim // 2, mu_scale=1.0, scale_scale=5.0)
    simulators = [MultivariateTSimulator(df)
                  for df in np.arange(1, 101, n_models)]
    # Construction alone is the assertion — no exception means success.
    _generative_model = MetaGenerativeModel(model_prior=model_prior,
                                            priors=[shared_prior] * n_models,
                                            simulators=simulators,
                                            data_transforms=add_jitter)
def test_same_param_transform(self):
    """A single parameter transform applied to every model must be
    accepted by the MetaGenerativeModel constructor."""

    def exp_params(x):
        return np.exp(x)

    n_models, dim = 10, 8
    shared_prior = TPrior(dim // 2, mu_scale=1.0, scale_scale=5.0)
    simulators = [MultivariateTSimulator(df)
                  for df in np.arange(1, 101, n_models)]
    # Construction alone is the assertion — no exception means success.
    _generative_model = MetaGenerativeModel(model_prior=model_prior,
                                            priors=[shared_prior] * n_models,
                                            simulators=simulators,
                                            param_transforms=exp_params)
def setUpClass(cls):
    """Build one MetaTrainer (amortizer + generative model) shared by
    every test in this class and stash it on the class object."""
    n_params, n_models = 10, 10
    bf_meta = build_meta_dict(
        {'n_params': n_params, 'n_models': n_models},
        DEFAULT_SETTING_INVARIANT_BAYES_FLOW,
    )
    amortizer = ex.amortizers.InvariantBayesFlow(bf_meta)
    generative_model = GenerativeModel(
        model_prior,
        [TPrior(n_params // 2, 1.0, 5.0)] * n_models,
        [MultivariateTSimulator(df) for df in np.arange(1, n_models + 1, 1)],
    )
    cls.trainer = MetaTrainer(amortizer,
                              generative_model,
                              loss=kl_latent_space,
                              learning_rate=.0003)
def test_individual_param_and_data_transform(self):
    """Per-model transform lists (with None entries meaning 'no transform')
    must be accepted for both parameters and data simultaneously."""
    # One entry per candidate model; None disables the transform there.
    param_transforms = [
        lambda x: np.exp(x),
        None,
        lambda x: np.round(x, 3),
    ]
    data_transforms = [
        lambda x: x + np.random.random(x.shape),
        lambda x: np.exp(x),
        None,
    ]

    n_models, dim = 3, 4
    shared_prior = TPrior(dim // 2, mu_scale=1.0, scale_scale=5.0)
    simulators = [MultivariateTSimulator(df)
                  for df in np.round(np.linspace(1, 101, n_models))]
    # Construction alone is the assertion — no exception means success.
    _generative_model = MetaGenerativeModel(model_prior=model_prior,
                                            priors=[shared_prior] * n_models,
                                            simulators=simulators,
                                            param_transforms=param_transforms,
                                            data_transforms=data_transforms)