def apply_observations(graph_time_steps: List[Tuple[Vertex, Vertex, Vertex]], window: int, observed: List[Coordinates]) -> None:
    """Observe the x-coordinate of the first vertex of every time slice.

    For each time step in this window, a unit-sigma Gaussian is attached to the
    slice's x vertex and observed at the corresponding recorded coordinate.

    :param graph_time_steps: one (x, y, z) vertex triple per modelled time step
    :param window: index of the current observation window
    :param observed: observed coordinates indexed by absolute time step
    """
    for (idx, time_slice) in enumerate(graph_time_steps):
        # Absolute time index = offset of this window plus position inside it.
        # NOTE(review): `window_size` is not defined in this function —
        # presumably a module-level constant; confirm it is in scope.
        t = window * (window_size - 1) + idx
        xt = time_slice[0]
        # Noise model: observed x is the latent x plus unit-variance Gaussian noise.
        observed_xt = Gaussian(xt, 1.0)
        observed_xt.observe(observed[t].x)
def test_can_pass_scalar_to_vertex() -> None:
    """A Gaussian built from Python floats samples a scalar numpy value."""
    vertex = Gaussian(0., 1.)
    drawn = vertex.sample()
    assert type(drawn) == numpy_types
    assert drawn.shape == ()
    assert drawn.dtype == float
def test_get_vertex_id() -> None:
    """Every component of the wrapped Java id appears in the Python-side id."""
    vertex = Gaussian(0., 1.)
    wrapped_id = vertex.unwrap().getId().getValue()
    wrapper_id = vertex.get_id()
    for component in wrapped_id:
        assert component in wrapper_id
def model() -> Model:
    """Build a model of two noisy thermometers reading one shared temperature."""
    with Model() as built:
        built.temperature = Uniform(0., 100.)
        built.thermometer_one = Gaussian(built.temperature, 1.0)
        built.thermometer_two = Gaussian(built.temperature, 1.0)
    return built
def model() -> Model:
    """Build a model where the sum of two latent Gaussians is observed at 20."""
    with Model() as built:
        built.a = Gaussian(0., 50.)
        built.b = Gaussian(0., 50.)
        built.c = built.a + built.b
        built.d = Gaussian(built.c, 1.)
        built.d.observe(20.0)
    return built
def test_vertex_sample_is_a_numpy_array() -> None:
    """Sampling a tensor-shaped Gaussian returns a matching float64 ndarray."""
    location = np.array([[1., 2.], [3., 4.]])
    scale = np.array([[.1, .2], [.3, .4]])
    drawn = Gaussian(location, scale).sample()
    assert type(drawn) == np.ndarray
    assert drawn.dtype == np.float64
    assert drawn.shape == (2, 2)
def model() -> Model:
    """Build the observed-sum model with a fixed random seed for reproducibility."""
    KeanuRandom.set_default_random_seed(1)
    with Model() as built:
        built.a = Gaussian(0., 50.)
        built.b = Gaussian(0., 50.)
        built.c = built.a + built.b
        built.d = Gaussian(built.c, 1.)
        built.d.observe(20.0)
    return built
def test_unobserve() -> None:
    """unobserve() clears a previously applied observation."""
    gaussian = Gaussian(0., 1.)
    gaussian.observe(4)
    assert gaussian.is_observed()
    assert gaussian.get_value() == 4
    gaussian.unobserve()
    assert not gaussian.is_observed()
def test_java_collections_to_generator() -> None:
    """Vertex._to_generator wraps each element of a Java collection once."""
    vertex = Gaussian(0., 1.)
    connected = vertex.unwrap().getConnectedGraph()
    wrapped = list(Vertex._to_generator(connected))
    expected_ids = [Vertex._get_python_id(raw) for raw in connected]
    assert connected.size() == len(wrapped)
    for element in wrapped:
        assert type(element) == Double
        assert element.get_id() in expected_ids
def test_can_get_parents_and_children() -> None:
    """Parent and child iterators expose the expected relatives, compared by label."""

    def same_labels(first, second) -> bool:
        return [a.get_label() for a in first] == [b.get_label() for b in second]

    parents = (Gaussian(0, 1, label="parent1"), Gaussian(0, 1, label="parent2"))
    children = tuple(
        Gaussian(parents[0], parents[1], label=f"child{i}") for i in range(5))
    for parent in parents:
        assert same_labels(parent.iter_children(), children)
    for child in children:
        assert same_labels(child.iter_parents(), parents)
def test_thermometers_example():
    """Documentation snippet: MAP inference on the two-thermometer model.

    The %%SNIPPET%% markers are consumed by the docs build — do not move them.
    """
    # %%SNIPPET_START%% PythonTwoThermometers
    with Model() as m:
        m.temperature = Uniform(20., 30.)
        m.first_thermometer = Gaussian(m.temperature, 2.5)
        m.second_thermometer = Gaussian(m.temperature, 5.)
    # Condition the model on the two thermometer readings.
    m.first_thermometer.observe(25.)
    m.second_thermometer.observe(30.)
    bayes_net = m.to_bayes_net()
    # Gradient-based optimisation finds the maximum-a-posteriori temperature.
    optimizer = GradientOptimizer(bayes_net)
    optimizer.max_a_posteriori()
    calculated_temperature = m.temperature.get_value()
def autocorrelation_example_scalar():
    """Documentation snippet: autocorrelation of scalar posterior samples.

    The %%SNIPPET%% markers are consumed by the docs build — do not move them.
    """
    with Model() as m:
        m.a = Gaussian(20, 1.)
        m.b = Gaussian(20, 1.)
        m.c = Gaussian(m.a + m.b, 1.)
    m.c.observe(43.)
    # Start the chain from plausible values for the latents.
    m.a.set_value(20.)
    m.b.set_value(20.)
    bayes_net = m.to_bayes_net()
    # %%SNIPPET_START%% PythonScalarAutocorrelation
    algo = MetropolisHastingsSampler()
    posterior_samples = sample(net=bayes_net,
                               sample_from=bayes_net.get_latent_vertices(),
                               sampling_algorithm=algo,
                               draws=100)
    vertex_samples = posterior_samples.get('a')
    ac = stats.autocorrelation(vertex_samples)
def test_can_pass_vertex_to_vertex(jvm_view: JVMView) -> None:
    """A vertex can be used directly as the mu parameter of another vertex."""
    mean_vertex = Gaussian(0., 1.)
    wrapped = Vertex(jvm_view.GaussianVertex, "gaussian", mean_vertex, Const(1.))
    drawn = wrapped.sample()
    assert type(drawn) == numpy_types
    assert drawn.shape == ()
    assert drawn.dtype == float
def test_cant_pass_vertex_to_cast_tensor_arg(cast_fn: Callable, cast_to_type: type) -> None:
    """Cast helpers reject vertices instead of silently unwrapping them."""
    vertex = Gaussian(0., 1.)
    expected_message = "Cannot cast {} to {}".format(type(vertex), cast_to_type)
    with pytest.raises(TypeError, match=expected_message):
        cast_fn(vertex)
def test_sample_throws_if_vertices_in_sample_from_are_missing_labels() -> None:
    """sample() raises when a sampled-from vertex carries no label."""
    sigma = Gamma(1., 1)
    labelled = Gaussian(0., sigma, label="gaussian")
    assert sigma.get_label() is None
    net = BayesNet([sigma, labelled])
    with pytest.raises(ValueError, match=r"Vertices in sample_from must be labelled."):
        sample(net=net, sample_from=net.iter_latent_vertices())
def inference_example_metropolis():
    """Documentation snippet: Metropolis-Hastings sampling on an observed sum.

    The %%SNIPPET%% markers are consumed by the docs build — do not move them.
    """
    # %%SNIPPET_START%% PythonMetropolisExample
    with Model() as m:
        m.a = Gaussian(20., 1.)
        m.b = Gaussian(20., 1.)
        m.c = Gaussian(m.a + m.b, 1.)
    m.c.observe(43.)
    # Start the chain from plausible values for the latents.
    m.a.set_value(20.)
    m.b.set_value(20.)
    bayes_net = m.to_bayes_net()
    posterior_samples = sample(net=bayes_net,
                               sample_from=bayes_net.get_latent_vertices(),
                               algo="metropolis",
                               draws=100000)
    average_posterior_a = np.average(posterior_samples.get('a'))
    average_posterior_b = np.average(posterior_samples.get('b'))
    # The posterior means of a and b should together explain the observation.
    actual = average_posterior_a + average_posterior_b
def inference_example_hmc_nuts():
    """Documentation snippets: Hamiltonian and NUTS sampling on an observed sum.

    The %%SNIPPET%% markers are consumed by the docs build — do not move them.
    """
    with Model() as m:
        m.a = Gaussian(20., 1.)
        m.b = Gaussian(20., 1.)
        m.c = Gaussian(m.a + m.b, 1.)
    m.c.observe(43.)
    # Start the chain from plausible values for the latents.
    m.a.set_value(20.)
    m.b.set_value(20.)
    bayes_net = m.to_bayes_net()
    # %%SNIPPET_START%% PythonHamiltonianExample
    posterior_samples = sample(net=bayes_net,
                               sample_from=bayes_net.get_latent_vertices(),
                               algo="hamiltonian",
                               draws=2000)
    # %%SNIPPET_END%% PythonHamiltonianExample
    # %%SNIPPET_START%% PythonNUTSExample
    posterior_samples = sample(net=bayes_net,
                               sample_from=bayes_net.get_latent_vertices(),
                               algo="NUTS",
                               draws=2000)
def autocorrelation_example_nd():
    """Documentation snippet: autocorrelation at an index of tensor-valued samples.

    The %%SNIPPET%% markers are consumed by the docs build — do not move them.
    """
    with Model() as m:
        m.a = Gaussian(np.array([[20., 30.], [40., 60.]]),
                       np.array([[1., 1.], [1., 1.]]))
    bayes_net = m.to_bayes_net()
    # %%SNIPPET_START%% PythonNdAutocorrelation
    algo = MetropolisHastingsSampler()
    posterior_samples = sample(net=bayes_net,
                               sample_from=bayes_net.get_latent_vertices(),
                               sampling_algorithm=algo,
                               draws=100)
    vertex_samples = posterior_samples.get('a')
    # Autocorrelation computed at tensor index (0, 1) of each sample.
    ac = stats.autocorrelation(vertex_samples, (0, 1))
def test_sampling_returns_multi_indexed_dict_of_list_of_scalars_for_mixed_net(
        algo: Callable[[BayesNet], PosteriorSamplingAlgorithm]) -> None:
    """Sampling a net mixing scalar and tensor vertices yields a dict keyed by
    (label, tensor_index) tuples that loads cleanly into a multi-index DataFrame.

    :param algo: factory producing a sampling algorithm for a given BayesNet
    """
    # One scalar latent plus rank-2 and rank-3 tensor vertices derived from it.
    exp = Exponential(1.)
    add_rank_2 = exp + np.array([1., 2., 3., 4.]).reshape((2, 2))
    add_rank_3 = exp + np.array([1., 2., 3., 4., 1., 2., 3., 4.]).reshape(
        (2, 2, 2))
    gaussian_rank_2 = Gaussian(add_rank_2, 2.)
    gaussian_rank_3 = Gaussian(add_rank_3, 1.)
    # Labels are required for every vertex that appears in sample_from.
    exp.set_label("exp")
    gaussian_rank_2.set_label("gaussian")
    gaussian_rank_3.set_label("gaussian2")
    mixed_net = BayesNet(exp.iter_connected_graph())
    draws = 5
    sample_from = list(mixed_net.iter_latent_vertices())
    vertex_labels = [vertex.get_label() for vertex in sample_from]
    samples = sample(net=mixed_net,
                     sample_from=sample_from,
                     sampling_algorithm=algo(mixed_net),
                     draws=draws)
    assert type(samples) == dict
    __assert_valid_samples(draws, samples)
    # Scalars are keyed with a rank-1 index; tensors with their full index.
    assert ('exp', (0, )) in samples
    for i in (0, 1):
        for j in (0, 1):
            assert (('gaussian', (i, j)) in samples)
    # The dict shape must translate into a two-level (label, index) DataFrame.
    df = pd.DataFrame(samples)
    # Columns per label = number of elements; tuple length = tensor rank.
    expected_num_columns = {"exp": 1, "gaussian": 4, "gaussian2": 8}
    expected_tuple_size = {"exp": 1, "gaussian": 2, "gaussian2": 3}
    assert len(df.columns.levels[0]) == 3
    for parent_column in df.columns.levels[0]:
        assert parent_column in vertex_labels
        assert len(
            df[parent_column].columns) == expected_num_columns[parent_column]
        for child_column in df[parent_column].columns:
            assert type(child_column) == tuple
            assert len(child_column) == expected_tuple_size[parent_column]
            # Every series holds one scalar per draw.
            assert len(df[parent_column][child_column]) == 5
            assert type(df[parent_column][child_column][0]) == np.float64
def test_id_str_of_downstream_vertex_is_higher_than_upstream() -> None:
    """Vertex ids are tuples that order downstream vertices after upstream ones."""
    upstream = Gaussian(0., 1.)
    downstream = Gaussian(0., upstream)
    upstream_id = upstream.get_id()
    downstream_id = downstream.get_id()
    assert type(upstream_id) == tuple
    assert type(downstream_id) == tuple
    assert upstream_id < downstream_id
def test_to_bayes_net() -> None:
    """to_bayes_net() collects exactly the model's vertices into a BayesNet."""
    with Model() as m:
        m.mu = Exponential(1.)
        m.sigma = Gamma(0.5, 0.1)
        m.gaussian = Gaussian(m.mu, m.sigma)
    net = m.to_bayes_net()
    assert isinstance(net, BayesNet)
    ids_in_net = [v.get_id() for v in net.get_latent_or_observed_vertices()]
    assert len(ids_in_net) == 3
    for vertex in (m.mu, m.sigma, m.gaussian):
        assert vertex.get_id() in ids_in_net
def tensor_example_creation():
    """Documentation snippet: creating a tensor-shaped vertex from numpy arrays.

    The %%SNIPPET%% marker is consumed by the docs build — do not move it.
    """
    # %%SNIPPET_START%% PythonVertexFromNDArray
    mu = np.array([[2., 3., 4.], [1., 4., 7.]])
    sigma = np.ones([2, 3])
    g = Gaussian(mu, sigma)
def test_can_pass_none_label() -> None:
    """A vertex constructed with label=None reports no label."""
    vertex = Gaussian(0., 1., label=None)
    # `is None` (identity) is the idiomatic None check, not `== None` (PEP 8).
    assert vertex.get_label() is None
def test_can_pass_pandas_series_to_vertex() -> None:
    """Pandas Series parameters produce a rank-1 sample of matching length."""
    vertex = Gaussian(pd.Series(data=[0.1, 0.4]), pd.Series(data=[0.1, 0.4]))
    drawn = vertex.sample()
    assert drawn.shape == (2, )
def test_can_pass_namespaced_label() -> None:
    """Dotted labels are namespaced; the last segment is the unqualified name."""
    labelled = Gaussian(0., 1., label="outer.inner.foo")
    assert labelled.get_label() == "outer.inner.foo"
    assert labelled.unwrap().getLabel().getUnqualifiedName() == "foo"
def test_can_pass_pandas_dataframe_to_vertex() -> None:
    """Pandas DataFrame parameters produce a rank-2, single-column sample."""
    vertex = Gaussian(pd.DataFrame(data=[0.1, 0.4]),
                      pd.DataFrame(data=[0.1, 0.4]))
    drawn = vertex.sample()
    assert drawn.shape == (2, 1)
def test_can_pass_ndarray_to_vertex() -> None:
    """numpy array parameters produce a sample of the same shape."""
    vertex = Gaussian(np.array([0.1, 0.4]), np.array([0.4, 0.5]))
    drawn = vertex.sample()
    assert drawn.shape == (2, )
def test_set_label() -> None:
    """set_label() stores a label that get_label() then returns."""
    vertex = Gaussian(0., 1.)
    vertex.set_label("gaussian_vertex")
    assert vertex.get_label() == "gaussian_vertex"
def test_cannot_set_none_label() -> None:
    """set_label(None) is rejected even when a label already exists."""
    labelled = Gaussian(0., 1., label="gaussian")
    with pytest.raises(ValueError, match=r"label cannot be None"):
        labelled.set_label(None)
def test_java_vertex_to_python_vertex_persists_label() -> None:
    """Round-tripping through the raw Java vertex keeps the label intact."""
    raw_java = Gaussian(0., 1., label="gaussian").unwrap()
    rebuilt = Vertex._from_java_vertex(raw_java)
    assert rebuilt.get_label() == "gaussian"