def test_vae():
    """Smoke-test a tiny VAE-style fit using local_rv and minibatch replacements."""
    batch = 10
    data = pm.floatX(np.random.rand(100))
    x_batched = pm.Minibatch(data, batch)

    # Symbolic input that will be swapped for the minibatch at fit time.
    x_sym = at.vector()
    x_sym.tag.test_value = data[:batch]

    # Encoder/decoder parameters held in shared variables so they can be
    # optimized jointly with the variational objective.
    w_enc = aesara.shared(pm.floatX([0.1, 0.1]))
    b_enc = aesara.shared(pm.floatX(1.0))
    w_dec = aesara.shared(pm.floatX(1.0))
    b_dec = aesara.shared(pm.floatX(1.0))

    encoded = x_sym.dimshuffle(0, "x") * w_enc.dimshuffle("x", 0) + b_enc
    mu, rho = encoded[:, 0], encoded[:, 1]

    with pm.Model():
        # Hidden variables
        zs = pm.Normal("zs", mu=0, sigma=1, size=batch)
        decoded = zs * w_dec + b_dec
        # Observation model
        pm.Normal("xs_", mu=decoded, sigma=0.1, observed=x_sym)

        pm.fit(
            1,
            local_rv={zs: dict(mu=mu, rho=rho)},
            more_replacements={x_sym: x_batched},
            more_obj_params=[w_enc, b_enc, w_dec, b_dec],
        )
def test_discrete_not_allowed():
    """VI must refuse models containing discrete latent variables."""
    mu_true = np.array([-2, 0, 2])
    labels = np.random.randint(len(mu_true), size=100)
    y = np.random.normal(mu_true[labels], np.ones_like(labels))

    with pm.Model():
        mu = pm.Normal("mu", mu=0, sigma=10, size=3)
        z = pm.Categorical("z", p=at.ones(3) / 3, size=len(y))
        pm.Normal("y_obs", mu=mu[z], sigma=1.0, observed=y)
        # The discrete latent `z` has no continuous parametrization,
        # so fitting is expected to error out immediately.
        with pytest.raises(opvi.ParametrizationError):
            pm.fit(n=1)  # fails
def test_fit_with_nans(score):
    """ADVI must raise FloatingPointError when the objective becomes non-finite.

    A NaN learning rate guarantees the optimizer immediately produces NaN
    updates, so ``pm.fit`` is expected to abort rather than silently continue.

    Parameters
    ----------
    score : bool
        Forwarded to ``pm.fit`` (parametrized by the test suite) to exercise
        both the score and non-score code paths.
    """
    X_mean = pm.floatX(np.linspace(0, 10, 10))
    y = pm.floatX(np.random.normal(X_mean * 4, 0.05))
    with pm.Model():
        inp = pm.Normal("X", X_mean, size=X_mean.shape)
        coef = pm.Normal("b", 4.0)
        mean = inp * coef
        pm.Normal("y", mean, 0.1, observed=y)
        # `pm.fit` raises inside the context manager, so binding its return
        # value (the original `advi = ...` / `as e`) was dead code — removed.
        with pytest.raises(FloatingPointError):
            pm.fit(100, score=score, obj_optimizer=pm.adam(learning_rate=float("nan")))
def test_var_replacement():
    """``sample_node`` should honor ``more_replacements`` for input substitution."""
    X_mean = pm.floatX(np.linspace(0, 10, 10))
    y = pm.floatX(np.random.normal(X_mean * 4, 0.05))

    with pm.Model():
        inp = pm.Normal("X", X_mean, size=X_mean.shape)
        coef = pm.Normal("b", 4.0)
        mean = inp * coef
        pm.Normal("y", mean, 0.1, observed=y)
        approx = pm.fit(100)

        # Without replacement the sampled node keeps the original shape.
        assert approx.sample_node(mean).eval().shape == (10,)

        # Replacing the input with an 11-element vector must propagate
        # through to the sampled node's shape.
        replacement = pm.floatX(np.linspace(0, 10, 11))
        sampled = approx.sample_node(mean, more_replacements={inp: replacement})
        assert sampled.eval().shape == (11,)