def test_meanfield_save_and_load(self):
    from mxfusion.inference.meanfield import create_Gaussian_meanfield
    from mxfusion.inference import StochasticVariationalInference
    from mxfusion.inference.grad_based_inference import GradBasedInference
    from mxfusion.inference import BatchInferenceLoop

    x = np.random.rand(1000, 1)
    y = np.random.rand(1000, 1)
    x_nd, y_nd = mx.nd.array(x), mx.nd.array(y)

    net = self.make_net()
    net(x_nd)
    m = self.make_model(net)

    observed = [m.y, m.x]
    q = create_Gaussian_meanfield(model=m, observed=observed)
    alg = StochasticVariationalInference(
        num_samples=3, model=m, observed=observed, posterior=q)
    infr = GradBasedInference(
        inference_algorithm=alg, grad_loop=BatchInferenceLoop())
    infr.initialize(y=y_nd, x=x_nd)
    infr.run(max_iter=1, learning_rate=1e-2, y=y_nd, x=x_nd)

    # Saving with a prefix writes the graph, parameter and configuration
    # JSON files used below.
    infr.save(prefix=self.PREFIX)

    # Build a second, identical model and load the saved state into it.
    net2 = self.make_net()
    net2(x_nd)
    m2 = self.make_model(net2)

    observed2 = [m2.y, m2.x]
    q2 = create_Gaussian_meanfield(model=m2, observed=observed2)
    alg2 = StochasticVariationalInference(
        num_samples=3, model=m2, observed=observed2, posterior=q2)
    infr2 = GradBasedInference(
        inference_algorithm=alg2, grad_loop=BatchInferenceLoop())
    infr2.initialize(y=y_nd, x=x_nd)

    # Load previous parameters
    infr2.load(primary_model_file=self.PREFIX + '_graph_0.json',
               secondary_graph_files=[self.PREFIX + '_graph_1.json'],
               parameters_file=self.PREFIX + '_params.json',
               inference_configuration_file=self.PREFIX + '_configuration.json',
               mxnet_constants_file=self.PREFIX + '_mxnet_constants.json',
               variable_constants_file=self.PREFIX + '_variable_constants.json')

    # The reloaded parameters should match the originals.
    for original_uuid, original_param in infr.params.param_dict.items():
        original_data = original_param.data().asnumpy()
        reloaded_data = infr2.params.param_dict[
            infr2._uuid_map[original_uuid]].data().asnumpy()
        assert np.all(np.isclose(original_data, reloaded_data))

    # Constants (MXNet arrays or plain scalars) should also match.
    for original_uuid, original_param in infr.params.constants.items():
        if isinstance(original_param, mx.ndarray.ndarray.NDArray):
            original_data = original_param.asnumpy()
            reloaded_data = infr2.params.constants[
                infr2._uuid_map[original_uuid]].asnumpy()
        else:
            original_data = original_param
            reloaded_data = infr2.params.constants[
                infr2._uuid_map[original_uuid]]
        assert np.all(np.isclose(original_data, reloaded_data))

    # The reloaded inference object should be able to continue training.
    infr2.run(max_iter=1, learning_rate=1e-2, y=y_nd, x=x_nd)
    self.remove_saved_files(self.PREFIX)
def test_softplus_in_params(self):
    from mxfusion.inference.meanfield import create_Gaussian_meanfield
    from mxfusion.inference import StochasticVariationalInference
    from mxfusion.inference.grad_based_inference import GradBasedInference
    from mxfusion.inference import BatchInferenceLoop

    m = make_basic_model()
    x = np.random.rand(1000, 1)
    y = np.random.rand(1000, 1)
    x_nd, y_nd = mx.nd.array(x), mx.nd.array(y)

    observed = [m.x]
    q = create_Gaussian_meanfield(model=m, observed=observed)
    alg = StochasticVariationalInference(
        num_samples=3, model=m, observed=observed, posterior=q)
    infr = GradBasedInference(
        inference_algorithm=alg, grad_loop=BatchInferenceLoop())
    infr.initialize(x=x_nd)
    infr.run(max_iter=1, learning_rate=1e-2, x=x_nd)

    # Force the raw (untransformed) parameter to a negative value and check
    # that the softplus transform still yields a positive value.
    uuid_of_pos_var = m.v.uuid
    infr.params._params[uuid_of_pos_var]._data = mx.nd.array([-10])
    raw_value = infr.params._params[uuid_of_pos_var].data()
    transformed_value = infr.params[m.v]
    assert raw_value.asnumpy()[0] < 0 and transformed_value.asnumpy()[0] > 0
def get_ppca_grad(self, x_train, inf_type, num_samples=100):
    import random
    from mxfusion.inference.grad_based_inference import GradBasedInference
    from mxfusion.inference import BatchInferenceLoop

    dtype = get_default_dtype()
    random.seed(0)
    np.random.seed(0)
    mx.random.seed(0)

    m = self.make_ppca_model()
    q = self.make_ppca_post(m)
    observed = [m.x]
    alg = inf_type(num_samples=num_samples, model=m, posterior=q,
                   observed=observed)
    infr = GradBasedInference(
        inference_algorithm=alg, grad_loop=BatchInferenceLoop())
    infr.initialize(x=mx.nd.array(x_train, dtype=dtype))
    infr.run(max_iter=1, learning_rate=1e-2,
             x=mx.nd.array(x_train, dtype=dtype), verbose=False)
    return infr, q.post_mean
def test_meanfield_saving(self):
    from mxfusion.inference.meanfield import create_Gaussian_meanfield
    from mxfusion.inference import StochasticVariationalInference
    from mxfusion.inference.grad_based_inference import GradBasedInference
    from mxfusion.inference import BatchInferenceLoop

    dtype = get_default_dtype()
    x = np.random.rand(10, 1)
    y = np.random.rand(10, 1)
    x_nd, y_nd = mx.nd.array(x, dtype=dtype), mx.nd.array(y, dtype=dtype)

    self.net = self.make_net()
    self.net(x_nd)
    m = self.make_model(self.net)

    observed = [m.y, m.x]
    q = create_Gaussian_meanfield(model=m, observed=observed)
    alg = StochasticVariationalInference(
        num_samples=3, model=m, observed=observed, posterior=q)
    infr = GradBasedInference(
        inference_algorithm=alg, grad_loop=BatchInferenceLoop())
    infr.initialize(y=y_nd, x=x_nd)
    infr.run(max_iter=1, learning_rate=1e-2, y=y_nd, x=x_nd)

    # Save the inference state to a zip archive and clean it up afterwards.
    infr.save(self.ZIPNAME)
    os.remove(self.ZIPNAME)
def test_one_map_example(self):
    """
    Tests a basic MAP inference example with gradient-based optimization.
    """
    from mxfusion.inference.map import MAP
    from mxfusion.inference.grad_based_inference import GradBasedInference
    from mxfusion.inference import BatchInferenceLoop

    observed = [self.m.y]
    alg = MAP(model=self.m, observed=observed)
    infr = GradBasedInference(
        inference_algorithm=alg, grad_loop=BatchInferenceLoop())
    infr.run(y=mx.nd.array(np.random.rand(10)), max_iter=10)
def test_score_function_rb_batch(self):
    from mxfusion.inference.meanfield import create_Gaussian_meanfield
    from mxfusion.inference.grad_based_inference import GradBasedInference
    from mxfusion.inference import BatchInferenceLoop

    x = np.random.rand(1000, 1)
    y = np.random.rand(1000, 1)
    x_nd, y_nd = mx.nd.array(x), mx.nd.array(y)

    self.net = self.make_net()
    self.net(x_nd)
    m = self.make_bnn_model(self.net)

    observed = [m.y, m.x]
    q = create_Gaussian_meanfield(model=m, observed=observed)
    alg = ScoreFunctionRBInference(
        num_samples=3, model=m, observed=observed, posterior=q)
    infr = GradBasedInference(
        inference_algorithm=alg, grad_loop=BatchInferenceLoop())
    infr.initialize(y=y_nd, x=x_nd)
    infr.run(max_iter=1, learning_rate=1e-2, y=y_nd, x=x_nd)
def test_meanfield_save_and_load(self):
    from mxfusion.inference.meanfield import create_Gaussian_meanfield
    from mxfusion.inference import StochasticVariationalInference
    from mxfusion.inference.grad_based_inference import GradBasedInference
    from mxfusion.inference import BatchInferenceLoop

    dtype = get_default_dtype()
    x = np.random.rand(1000, 1)
    y = np.random.rand(1000, 1)
    x_nd, y_nd = mx.nd.array(x, dtype=dtype), mx.nd.array(y, dtype=dtype)

    net = self.make_net()
    net(x_nd)
    m = self.make_model(net)

    observed = [m.y, m.x]
    q = create_Gaussian_meanfield(model=m, observed=observed)
    alg = StochasticVariationalInference(
        num_samples=3, model=m, observed=observed, posterior=q)
    infr = GradBasedInference(
        inference_algorithm=alg, grad_loop=BatchInferenceLoop())
    infr.initialize(y=y_nd, x=x_nd)
    infr.run(max_iter=1, learning_rate=1e-2, y=y_nd, x=x_nd)
    infr.save(self.ZIPNAME)

    # Build a second, identical model and reload the saved inference state.
    net2 = self.make_net()
    net2(x_nd)
    m2 = self.make_model(net2)

    observed2 = [m2.y, m2.x]
    q2 = create_Gaussian_meanfield(model=m2, observed=observed2)
    alg2 = StochasticVariationalInference(
        num_samples=3, model=m2, observed=observed2, posterior=q2)
    infr2 = GradBasedInference(
        inference_algorithm=alg2, grad_loop=BatchInferenceLoop())
    infr2.initialize(y=y_nd, x=x_nd)

    # Load previous parameters from the zip archive
    infr2.load(self.ZIPNAME)

    # The reloaded parameters should match the originals.
    for original_uuid, original_param in infr.params.param_dict.items():
        original_data = original_param.data().asnumpy()
        reloaded_data = infr2.params.param_dict[
            infr2._uuid_map[original_uuid]].data().asnumpy()
        assert np.all(np.isclose(original_data, reloaded_data))

    # Constants (MXNet arrays or plain scalars) should also match.
    for original_uuid, original_param in infr.params.constants.items():
        if isinstance(original_param, mx.ndarray.ndarray.NDArray):
            original_data = original_param.asnumpy()
            reloaded_data = infr2.params.constants[
                infr2._uuid_map[original_uuid]].asnumpy()
        else:
            original_data = original_param
            reloaded_data = infr2.params.constants[
                infr2._uuid_map[original_uuid]]
        assert np.all(np.isclose(original_data, reloaded_data))

    # The reloaded inference object should be able to continue training.
    infr2.run(max_iter=1, learning_rate=1e-2, y=y_nd, x=x_nd)
    os.remove(self.ZIPNAME)