def eager_affine_normal(matrix, loc, scale, value_x, value_y):
    assert len(matrix.output.shape) == 2
    assert value_x.output == reals(matrix.output.shape[0])
    assert value_y.output == reals(matrix.output.shape[1])
    # Shift the mean by the affine term value_x @ matrix.
    loc += value_x @ matrix

    # Align loc and scale to raw tensors over shared integer inputs.
    int_inputs, (loc, scale) = align_tensors(loc, scale, expand=True)
    i_name = gensym("i")
    y_name = gensym("y")
    y_i_name = gensym("y_i")
    # Introduce a fresh integer input ranging over the event size of value_y.
    int_inputs[i_name] = bint(value_y.output.shape[0])
    loc = Tensor(loc, int_inputs)
    scale = Tensor(scale, int_inputs)
    # Build an elementwise Normal over y_i, then bundle it into a joint
    # distribution over the vector-valued y via Independent.
    y_dist = Independent(Normal(loc, scale, y_i_name), y_name, i_name, y_i_name)
    # Substitute the observed value_y for y.
    return y_dist(**{y_name: value_y})
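For orientation, here is a minimal sketch in plain torch (not funsor) of the density this eager handler encodes: once value_x is substituted, y follows an independent Normal with mean loc + value_x @ matrix and the given scale. The shapes below are made up for illustration.

    import torch

    # Sketch only: the log-density the affine-normal pattern corresponds to,
    # written directly in torch rather than via funsor terms.
    matrix = torch.randn(2, 3)
    loc, scale = torch.zeros(3), torch.ones(3)
    value_x, value_y = torch.randn(2), torch.randn(3)
    log_prob = torch.distributions.Normal(loc + value_x @ matrix, scale).log_prob(value_y).sum()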
def test_memoize_sample(check_sample):
    with memoize():
        m, s = torch.tensor(0.), torch.tensor(1.)
        j1 = Normal(m, s, 'x')
        j2 = Normal(m, s, 'x')
        x1 = j1.sample(frozenset({'x'}))
        x12 = j1.sample(frozenset({'x'}))
        x2 = j2.sample(frozenset({'x'}))

    # this assertion now passes
    assert j1 is j2

    # these assertions fail because sample is not memoized
    if check_sample:
        assert x1 is x12
        assert x1 is x2
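The `j1 is j2` behavior comes from memoizing the funsor constructor on its arguments, while sampling draws fresh randomness per call and is not keyed that way. A toy sketch of that cons-hashing idea, not funsor's actual memoize implementation:

    # Toy constructor cache (not funsor's memoize): equal arguments yield the
    # very same object, so identity checks like `j1 is j2` hold, but anything
    # that produces a new result per call (like sample) is not shared this way.
    _cache = {}

    class TinyNormal:
        def __new__(cls, loc, scale, name):
            key = (loc, scale, name)
            if key not in _cache:
                _cache[key] = super().__new__(cls)
            return _cache[key]

    assert TinyNormal(0., 1., 'x') is TinyNormal(0., 1., 'x')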
def _normal_to_funsor(pyro_dist, event_inputs=()):
    loc = tensor_to_funsor(pyro_dist.loc, event_inputs)
    scale = tensor_to_funsor(pyro_dist.scale, event_inputs)
    return Normal(loc, scale)
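A hypothetical usage sketch: the Pyro distribution and the plate name below are invented for illustration, and `tensor_to_funsor` is assumed to be the converter from funsor.pyro.convert used above. The names in `event_inputs` label batch dimensions of the Pyro distribution, so the converted funsor gains "plate_i" as a free input alongside the usual "value" input of a funsor Normal.

    import torch
    import pyro.distributions as dist

    # Hypothetical usage of the converter above on a (4,)-batched Normal.
    d = dist.Normal(torch.randn(4), torch.ones(4))
    f = _normal_to_funsor(d, event_inputs=("plate_i",))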