Example #1
def test_gauss_2d_data_dimension():
    """Test the data dimensionality output of the Gauss Simulator using a 2D Gaussian
    """
    dim = 2
    s = Gauss(dim=dim)

    n_samples = 10
    thetas = np.tile(np.array([0., 1.]), (n_samples, 1))
    sample_list = s.gen(thetas)

    assert sample_list[0][0]['data'].shape == (dim, ), \
        'the dimensionality of the data is wrong. ' \
        'should be {} is {}'.format((dim, ), sample_list[0][0]['data'].shape)
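The snippets in these examples omit their imports. Assuming they come from the delfi (density estimation likelihood-free inference) test suite, a preamble along the following lines would be needed; the exact module paths are assumptions based on the package layout:

import numpy as np
import theano.tensor as tt

import delfi.distribution as dd
import delfi.generator as dg
import delfi.inference as infer
import delfi.summarystats as ds
from delfi.simulator import Gauss
from delfi.neuralnet.NeuralNet import NeuralNet
from delfi.neuralnet.Trainer import Trainer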
Example #2
def test_gauss_1d_simulator_output():
    """Test the output of the simulator using the example of a 1D Gaussian
    """
    dim = 1
    s = Gauss(dim=dim)

    n_samples = 10
    thetas = np.tile(np.array([0.]), (n_samples, 1))
    sample_list = s.gen(thetas)

    assert len(sample_list) == n_samples, \
        'the list should have as many entries as there are samples'
    assert isinstance(sample_list[0][0], dict), \
        'the entries should be dictionaries'
Example #3
def dont_test_apt_inference_atomicprop_maf_normalize(n_params, seed=47):
    # The normalization test is not finished yet; this only sets up the generator.
    m = Gauss(dim=n_params, noise_cov=0.1)
    p = dd.Uniform(lower=-0.05 * np.ones(n_params),
                   upper=0.05 * np.ones(n_params))
    s = ds.Identity()
    g = dg.Default(model=m, prior=p, summary=s)
Example #4
def test_IndependentJoint_uniform_rejection():
    # check that proposed samples are correctly rejected when using an
    # IndependentJoint prior with some child distributions uniform. A Gaussian
    # proposal is used to generate samples that need to be rejected.
    N = 1000
    B1 = [-1.0, 1.0]
    B2 = [-2.0, 2.0]
    u1 = dd.Uniform(B1[0], B1[1])
    u2 = dd.Uniform(B2[0], B2[1])
    prior = dd.IndependentJoint([u1, u2])

    m = [0., 0.]
    S = [[2., 0.], [0., 2.]]
    proposal = dd.Gaussian(m=m, S=S)

    model = Gauss(dim=2)

    s = ds.Identity()

    g = dg.Default(model=model, prior=prior, summary=s)
    g.proposal = proposal

    params, stats = g.gen(N, verbose=False)
    assert (params.min(axis=0) >= np.array([B1[0], B2[0]])).all() and \
        (params.max(axis=0) <= np.array([B1[1], B2[1]])).all(), \
        "rejection failed"
Example #5
def test_trainer_updates():
    n_components = 1
    n_params = 2
    seed = 42
    svi = True

    m = Gauss(dim=n_params)
    p = dd.Gaussian(m=np.zeros((n_params, )), S=np.eye(n_params))
    s = ds.Identity()
    g = dg.Default(model=m, prior=p, summary=s)

    nn = NeuralNet(
        n_components=n_components,
        n_hiddens=[10],
        n_inputs=n_params,
        n_outputs=n_params,
        seed=seed,
        svi=svi)
    loss = -tt.mean(nn.lprobs)

    trn_inputs = [nn.params, nn.stats]
    trn_data = g.gen(100)  # params, stats
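    # 'dtype' is assumed to be theano's floatX, imported alongside NeuralNet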
    trn_data = tuple(x.astype(dtype) for x in trn_data)

    t = Trainer(network=nn, loss=loss, trn_data=trn_data, trn_inputs=trn_inputs)

    # single update
    outputs = t.make_update(*trn_data)

    # training
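    # (the positional arguments are presumably the number of epochs and the minibatch size)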
    outputs = t.train(100, 50)
Example #6
def test_mpgen(n_samples=1000, n_params=2, n_cores=4, seed=500):
    p = dd.Gaussian(m=np.zeros((n_params, )), S=np.eye(n_params), seed=seed)
    s = ds.Identity(seed=seed + 1)

    mlist = [Gauss(dim=n_params, seed=seed + 2 + i) for i in range(n_cores)]
    g = dg.MPGenerator(models=mlist,
                       prior=p,
                       summary=s,
                       seed=seed + 2 + n_cores)
    params, stats = g.gen(n_samples, verbose=False)

    # make sure the different models are providing different outputs
    assert np.unique(params).size == params.size
    assert np.unique(stats).size == stats.size
Example #7
def test_gauss_shape():
    for n_params in range(1, 3):
        m = Gauss(dim=n_params)
        p = dd.Gaussian(m=np.zeros((n_params, )), S=np.eye(n_params))
        s = ds.Identity()

        g = dg.Default(model=m, prior=p, summary=s)

        n_samples = 100
        params, stats = g.gen(n_samples)

        n_summary = n_params
        assert params.shape == (n_samples, n_params)
        assert stats.shape == (n_samples, n_summary)
Example #8
def init_all_gaussian(n_params=2,
                      seed=42,
                      inferenceobj=None,
                      **inf_setup_opts):
    model = Gauss(dim=n_params, seed=seed)
    prior = dd.Gaussian(m=np.zeros((n_params, )),
                        S=np.eye(n_params),
                        seed=seed + 1)
    s = ds.Identity(seed=seed + 2)
    g = dg.Default(model=model, prior=prior, summary=s, seed=seed + 3)
    obs = np.zeros((1, n_params))  # observe the zero vector (the prior mean)

    res = inferenceobj(g, obs=obs, seed=seed + 4, **inf_setup_opts)
    res.reset(seed=seed + 4)

    m_true, S_true = simplegaussprod(obs, model.noise_cov, prior.m, prior.S)
    return res, m_true, S_true
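The simplegaussprod helper used above is defined elsewhere in the test suite. Its name and signature are taken from the call site; a minimal sketch of what it plausibly computes, namely the analytic posterior of a Gaussian likelihood with known noise covariance under a Gaussian prior, could look like this:

def simplegaussprod(obs, noise_cov, prior_m, prior_S):
    """Analytic posterior for x ~ N(theta, noise_cov) with theta ~ N(prior_m, prior_S)."""
    obs = np.atleast_2d(obs)
    n, dim = obs.shape
    noise_S = noise_cov * np.eye(dim) if np.isscalar(noise_cov) else np.asarray(noise_cov)
    prior_P = np.linalg.inv(prior_S)   # prior precision
    noise_P = np.linalg.inv(noise_S)   # likelihood precision
    S_post = np.linalg.inv(prior_P + n * noise_P)
    m_post = S_post @ (prior_P @ prior_m + noise_P @ obs.sum(axis=0))
    return m_post, S_post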
Example #9
def test_basic_inference(n_params=2, seed=42):
    m = Gauss(dim=n_params, seed=seed)
    p = dd.Gaussian(m=np.zeros((n_params, )), S=np.eye(n_params), seed=seed)
    s = ds.Identity()
    g = dg.Default(model=m, prior=p, summary=s)

    # set up inference
    res = infer.Basic(g, seed=seed)

    # run with N samples
    out = res.run(1000)

    # check result
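    # assuming the simulator's default noise covariance is 0.1*I and the prior is N(0, I),
    # the analytic posterior covariance is (I + 10*I)^-1 ~ 0.09*I, close to the 0.1 target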
    posterior = res.predict(np.array([0., 0.]).reshape(1, -1))
    assert np.allclose(posterior.xs[0].S,
                       np.array([[0.1, 0.0], [0.0, 0.1]]),
                       atol=0.05)
    assert np.allclose(posterior.xs[0].m, np.array([0.0, 0.0]), atol=0.05)
Example #10
def test_basic_inference_inputsamples(n_params=2, seed=42, n_pilot=1000):
    model = Gauss(dim=n_params, seed=seed)
    prior = dd.Gaussian(m=np.zeros((n_params, )),
                        S=np.eye(n_params),
                        seed=seed + 1)
    s = ds.Identity(seed=seed + 2)
    g = dg.Default(model=model, prior=prior, summary=s, seed=seed + 3)
    obs = np.zeros((1, n_params))  # observe the zero vector (the prior mean)
    m_true, S_true = simplegaussprod(obs, model.noise_cov, prior.m, prior.S)

    params, stats = g.gen(n_pilot)
    pilot_samples = (params, stats)

    res = infer.Basic(g, obs=obs, seed=seed + 4, pilot_samples=pilot_samples)
    res.reset(seed=seed + 4)

    out = res.run(n_train=1000)
    posterior = res.predict(res.obs.reshape(1, -1))

    check_gaussian_posterior(posterior, m_true, S_true)
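check_gaussian_posterior is likewise defined elsewhere in the test suite. A sketch of a comparable check, under the assumption that the predicted posterior exposes its leading Gaussian component via .xs[0].m and .xs[0].S as in the other examples:

def check_gaussian_posterior(posterior, m_true, S_true, atol=0.05):
    """Compare the leading Gaussian component of the posterior with the analytic solution."""
    assert np.allclose(posterior.xs[0].m, m_true, atol=atol), 'posterior mean is off'
    assert np.allclose(posterior.xs[0].S, S_true, atol=atol), 'posterior covariance is off'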
Example #11
def test_snpe_inference(n_params=2, seed=42):
    m = Gauss(dim=n_params, seed=seed)
    p = dd.Gaussian(m=np.zeros((n_params, )), S=np.eye(n_params), seed=seed)
    s = ds.Identity()
    g = dg.Default(model=m, prior=p, summary=s)

    # observation
    _, obs = g.gen(1)

    # set up inference
    res = infer.SNPE(g, obs=obs)

    # run with N samples
    out = res.run(n_train=1000, n_rounds=1)

    # check result
    posterior = res.predict(np.array([0., 0.]).reshape(1, -1))
    assert np.allclose(posterior.xs[0].S,
                       np.array([[0.1, 0.0], [0.0, 0.1]]),
                       atol=0.05)
    assert np.allclose(posterior.xs[0].m, np.array([0.0, 0.0]), atol=0.05)