Example #1
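These are pytest tests of 21CMMC's mcmc interface. They rely on fixtures (core, likelihood_coeval, default_params, tmpdirec) defined elsewhere in the test suite; a minimal, hypothetical sketch of those fixtures and the required imports is given below. The guess values 30.0 (HII_EFF_FACTOR) and 4.7 (ION_Tvir_MIN) come from the assertions in Example #1, while the import path, the likelihood class, the core redshift, and the prior bounds/widths are assumptions and may differ from the real conftest.

import numpy as np
import pytest

from py21cmmc import mcmc  # assumed import path


@pytest.fixture(scope="module")
def tmpdirec(tmp_path_factory):
    # Directory that the chains are written to (the datadir/direc arguments below).
    return tmp_path_factory.mktemp("mcmc_data")


@pytest.fixture(scope="module")
def core():
    # Coeval core module. redshift=7 is an assumption; Example #4 only implies
    # that the fixture's redshift is not 8.
    return mcmc.CoreCoevalModule(
        redshift=7,
        user_params={"HII_DIM": 35, "DIM": 70},
        cache_mcmc=False,
        cache_init=False,
    )


@pytest.fixture(scope="module")
def likelihood_coeval():
    # Assumed to be a coeval power-spectrum likelihood built on simulated data;
    # the class name and the simulate flag are assumptions.
    return mcmc.Likelihood1DPowerCoeval(simulate=True)


@pytest.fixture(scope="module")
def default_params():
    # [guess, min, max, width] per parameter; only the guesses are pinned down
    # by the tests, the bounds and widths are illustrative.
    return {
        "HII_EFF_FACTOR": [30.0, 10.0, 50.0, 3.0],
        "ION_Tvir_MIN": [4.7, 4.0, 6.0, 0.1],
    }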
def test_mcmc(core, likelihood_coeval, default_params, tmpdirec):
    chain = mcmc.run_mcmc(
        core,
        likelihood_coeval,
        model_name="TEST",
        continue_sampling=False,
        datadir=str(tmpdirec),
        params=default_params,
        walkersRatio=2,
        burninIterations=0,
        sampleIterations=2,
        threadCount=1,
    )

    samples_from_chain = mcmc.get_samples(chain)
    samples_from_file = mcmc.get_samples(tmpdirec / "TEST")

    # Make sure reading from the file gives the same results as the in-memory chain.
    assert samples_from_chain.iteration == samples_from_file.iteration
    assert np.all(samples_from_file.accepted == samples_from_chain.accepted)
    assert np.all(
        samples_from_file.get_chain() == samples_from_chain.get_chain())

    assert all(c in ["HII_EFF_FACTOR", "ION_Tvir_MIN"]
               for c in samples_from_chain.param_names)
    assert samples_from_chain.has_blobs
    assert samples_from_chain.param_guess["HII_EFF_FACTOR"] == 30.0
    assert samples_from_chain.param_guess["ION_Tvir_MIN"] == 4.7
Example #2
def test_load_chain(core, likelihood_coeval, default_params, tmpdirec):
    mcmc.run_mcmc(
        core,
        likelihood_coeval,
        model_name="TESTLOADCHAIN",
        continue_sampling=False,
        datadir=tmpdirec,
        params=default_params,
        walkersRatio=2,
        burninIterations=0,
        sampleIterations=1,
        threadCount=1,
    )

    lcc = mcmc.load_primitive_chain("TESTLOADCHAIN", direc=tmpdirec)

    assert lcc.getCoreModules()[0].redshift == core.redshift
Example #3
def test_multinest():

    model_name = "LuminosityLikelihood"
    redshifts = [6, 7, 8, 10]
    # Each prior is presumably [guess, min, max, width] for that parameter.
    F_STAR10 = [-1.3, -3, 0, 1.0]
    ALPHA_STAR = [0.5, -0.5, 1.0, 1.0]
    M_TURN = [8.69897, 8, 10, 1.0]
    t_STAR = [0.5, 0.01, 1, 0.3]

    mcmc_options = {
        "n_live_points": 10,
        "max_iter": 10,
    }
    mcmc.run_mcmc(
        [
            mcmc.CoreLuminosityFunction(redshift=z, sigma=0, name="lfz%d" % z)
            for z in redshifts
        ],
        [
            mcmc.LikelihoodLuminosityFunction(name="lfz%d" % z)
            for z in redshifts
        ],
        model_name=model_name,
        params={
            "F_STAR10": F_STAR10,
            "ALPHA_STAR": ALPHA_STAR,
            "M_TURN": M_TURN,
            "t_STAR": t_STAR,
        },
        use_multinest=True,
        **mcmc_options,
    )

    import pymultinest

    nest = pymultinest.Analyzer(
        4, outputfiles_basename="./MultiNest/%s" % model_name
    )
    data = nest.get_data()

    assert data.shape[1] == 6
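The shape assertion reflects MultiNest's raw output layout: Analyzer.get_data() loads the "[basename].txt" samples file, whose rows are (sample probability, -2 ln L, parameter values...), i.e. two bookkeeping columns plus the four fitted parameters. A short follow-up sketch, assuming the standard pymultinest Analyzer API; the get_best_fit() call is illustrative and not part of the original test.

import pymultinest

nest = pymultinest.Analyzer(
    4, outputfiles_basename="./MultiNest/LuminosityLikelihood"
)
data = nest.get_data()

weights = data[:, 0]   # sample probability (importance weight)
neg2lnL = data[:, 1]   # -2 * ln(likelihood)
samples = data[:, 2:]  # F_STAR10, ALPHA_STAR, M_TURN, t_STAR

# Highest-likelihood point, as reported by pymultinest.
best = nest.get_best_fit()
print(best["log_likelihood"], best["parameters"])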
Example #4
def test_bad_continuation(core, likelihood_coeval, default_params, tmpdirec):
    "check if trying to continue a chain that isn't compatible with previous chain raises an error"

    mcmc.run_mcmc(
        core,
        likelihood_coeval,
        model_name="TESTBURNIN",
        continue_sampling=False,
        datadir=tmpdirec,
        params=default_params,
        walkersRatio=2,
        burninIterations=0,
        sampleIterations=1,
        threadCount=1,
    )

    # A core with a different redshift is incompatible with the saved chain.
    core = mcmc.CoreCoevalModule(
        redshift=8,
        user_params={"HII_DIM": 35, "DIM": 70},
        cache_mcmc=False,
        cache_init=False,
    )

    with pytest.raises(RuntimeError):
        mcmc.run_mcmc(
            core,
            likelihood_coeval,
            model_name="TESTBURNIN",
            continue_sampling=True,
            datadir=tmpdirec,
            params=default_params,
            walkersRatio=2,
            burninIterations=0,
            sampleIterations=2,
            threadCount=1,
        )
Example #5
def test_continue_burnin(core, likelihood_coeval, default_params, tmpdirec):
    # Must sample for at least one iteration!
    with pytest.raises(AssertionError):
        mcmc.run_mcmc(
            core,
            likelihood_coeval,
            model_name="TESTBURNIN",
            continue_sampling=False,
            datadir=str(tmpdirec),
            params=default_params,
            walkersRatio=2,
            burninIterations=1,
            sampleIterations=0,
            threadCount=1,
        )

    chain = mcmc.run_mcmc(
        core,
        likelihood_coeval,
        model_name="TESTBURNIN",
        continue_sampling=False,
        datadir=str(tmpdirec),
        params=default_params,
        walkersRatio=2,
        burninIterations=1,
        sampleIterations=1,
        threadCount=1,
    )

    # Save the chains to memory here: the samples object reads the file on every
    # call, so it constantly reflects updates on disk.
    chain_b_chain = mcmc.get_samples(chain, burnin=True).get_chain()
    chain_s_chain = mcmc.get_samples(chain).get_chain()

    chain2 = mcmc.run_mcmc(
        core,
        likelihood_coeval,
        model_name="TESTBURNIN",
        continue_sampling=True,
        datadir=tmpdirec,
        params=default_params,
        walkersRatio=2,
        burninIterations=2,
        sampleIterations=1,
        threadCount=1,
    )

    burnin2 = mcmc.get_samples(chain2, burnin=True)
    chain2_b_chain = burnin2.get_chain()
    chain2_s_chain = mcmc.get_samples(chain).get_chain()

    assert likelihood_coeval._simulate is False  # because we're continuing sampling
    assert burnin2.iteration == 2
    assert np.all(
        chain2_b_chain[:1] == chain_b_chain
    )  # the first burn-in iteration should be unchanged

    # The actual samples *should* have been deleted, because the burn-in lengths differ.
    assert not np.all(chain_s_chain == chain2_s_chain)

    chain3 = mcmc.run_mcmc(
        core,
        likelihood_coeval,
        model_name="TESTBURNIN",
        continue_sampling=True,
        datadir=tmpdirec,
        params=default_params,
        walkersRatio=2,
        burninIterations=2,
        sampleIterations=2,
        threadCount=1,
    )

    samples3 = chain3.samples
    assert samples3.iteration == 2

    chain3_b_chain = mcmc.get_samples(chain3, burnin=True).get_chain()
    assert np.all(chain3_b_chain == chain2_b_chain)

    chain3_s_chain = mcmc.get_samples(chain3).get_chain()
    assert np.all(chain2_s_chain == chain3_s_chain[:1])

    # Don't run if we already have all the requested samples, and let the user know!
    with pytest.raises(ValueError):
        mcmc.run_mcmc(
            core,
            likelihood_coeval,
            model_name="TESTBURNIN",
            continue_sampling=True,
            datadir=tmpdirec,
            params=default_params,
            walkersRatio=2,
            burninIterations=2,
            sampleIterations=2,
            threadCount=1,
        )

    # Set _simulate back to True so the fixture has no side effects for other tests.
    likelihood_coeval._simulate = True