Example no. 1
def per_iteration(lmparams, i, resids, mod_pars, photo_data, rv_data, *args, **kws):
    # lmfit iteration callback: report the fit state every 10 iterations.
    if i % 10 == 0:
        ncores, fname = args

        # Convert the lmfit Parameters object into the internal parameter structure.
        params = utilfuncs.get_lmfit_parameters(mod_pars, lmparams)

        # Compute the reduced chi-square of the current fit, print a progress line,
        # and write the current parameters back out in input format.
        redchisqr = utilfuncs.reduced_chisqr(mod_pars, params, photo_data, rv_data)
        utilfuncs.iterprint(mod_pars, params, 0.0, redchisqr, 0.0, 0.0)
        utilfuncs.report_as_input(mod_pars, params, fname)
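
This first callback has the shape of lmfit's iter_cb hook: lmfit calls it after every function evaluation with the current Parameters object, the iteration count, the residual array, and whatever extra args/kws were handed to the minimizer. Below is a minimal, self-contained sketch of how such a callback can be wired up through lmfit.minimize; the toy data, the residuals function, and the simplified callback body are illustrative stand-ins, not this project's actual model or its utilfuncs reporting.

import lmfit
import numpy as np

# Toy photometric data laid out as [time, flux, flux_err], mirroring the
# photo_data[1]/photo_data[2] usage in Example no. 1.
photo_data = np.array([np.linspace(0.0, 1.0, 50),
                       1.0 + 0.01 * np.random.randn(50),
                       np.full(50, 0.01)])
rv_data, mod_pars, ncores, fname = None, (1,), 1, "demo"

lmparams = lmfit.Parameters()
lmparams.add('offset', value=0.9)

def residuals(params, mod_pars, photo_data, rv_data, ncores, fname):
    # Hypothetical objective: a constant-flux model fitted to the toy light curve.
    model = np.full_like(photo_data[1], params['offset'].value)
    return (photo_data[1] - model) / photo_data[2]

def per_iteration(params, i, resids, mod_pars, photo_data, rv_data, *args, **kws):
    # Same signature as Example no. 1; simplified reporting in place of utilfuncs.
    if i % 10 == 0:
        print(i, np.sum(resids ** 2))

result = lmfit.minimize(residuals, lmparams,
                        args=(mod_pars, photo_data, rv_data, ncores, fname),
                        iter_cb=per_iteration)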
Example no. 2
def per_iteration(mod_pars, theta, lnl, model):
    # Reporting callback: photo_data, fname, and max_lnlike are module-level globals.
    global max_lnlike
    if lnl > max_lnlike:
        max_lnlike = lnl

        # Split the flat parameter vector into per-body parameter sets.
        params = utilfuncs.split_parameters(theta, mod_pars[0])

        # Reduced chi-square: chi-square divided by the degrees of freedom
        # (data points - 1 - free parameters: 5 per body plus 6 per additional body).
        redchisqr = np.sum(((photo_data[1] - model) / photo_data[2]) ** 2) / \
                    (photo_data[1].size - 1 - (mod_pars[0] * 5 + (mod_pars[0] - 1) * 6))

        utilfuncs.iterprint(mod_pars, params, max_lnlike, redchisqr, 0.0, 0.0)
        utilfuncs.report_as_input(mod_pars, params, fname)
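
The inline expression above is the standard reduced chi-square: the chi-square divided by the degrees of freedom, here the number of photometric points minus one minus the number of free model parameters (mod_pars[0] * 5 + (mod_pars[0] - 1) * 6 for mod_pars[0] bodies). A generic, standalone sketch of the same statistic, with the parameter counting shown only to illustrate that expression:

import numpy as np

def reduced_chi_square(observed, model, sigma, n_free_params):
    # Chi-square per degree of freedom for a model with n_free_params fitted parameters.
    chisq = np.sum(((observed - model) / sigma) ** 2)
    dof = observed.size - 1 - n_free_params
    return chisq / dof

# Example: with 3 bodies the expression above counts 3*5 + 2*6 = 27 free parameters.
observed = np.random.normal(1.0, 0.01, 500)
model = np.ones(500)
print(reduced_chi_square(observed, model, 0.01, 3 * 5 + (3 - 1) * 6))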
Example no. 3
import itertools
import os
import time

import emcee
import numpy as np

# Note: utilfuncs and lnprob are project-specific helpers defined elsewhere in this module.

def generate(mod_pars, body_pars, photo_data, rv_data, nwalkers, ncores, fname, niterations=500):
    # Flatten the per-body parameters into a single parameter vector
    theta = np.array(list(itertools.chain.from_iterable(body_pars)))

    # Set up the sampler: replace exact zeros so the relative perturbation below is
    # non-zero, then start each walker in a small Gaussian ball around theta.
    ndim = len(theta)
    theta[theta == 0.0] = 1.0e-10
    pos0 = [theta + theta * 1.0e-3 * np.random.randn(ndim) for i in range(nwalkers)]

    sampler = emcee.EnsembleSampler(nwalkers, ndim, lnprob, args=(mod_pars, photo_data, rv_data), threads=ncores)

    # Clear and run the production chain.
    print("Running MCMC...")

    # Make sure the report directory exists (the chain file below is written there)
    if not os.path.exists("./output/{0}/reports".format(fname)):
        os.makedirs("./output/{0}/reports".format(fname))

    # Setup some values for tracking time and completion
    citer, tlast, tsum = 0.0, time.time(), []

    # emcee 2.x interface: sample() yields (position, log-probability, RNG state) at
    # each step; in emcee 3 the storechain keyword was renamed to store.
    for pos, lnp, state in sampler.sample(pos0, iterations=niterations, storechain=True):
        # Save out the chain for later analysis
        with open("./output/{0}/reports/mcmc_chain.dat".format(fname), "a+") as f:
            for k in range(pos.shape[0]):
                f.write("{0:4d} {1:s}\n".format(k, " ".join(map(str, pos[k]))))

        # Track progress and estimate the time remaining from the median step time
        citer += 1.0
        tsum.append(time.time() - tlast)
        tleft = np.median(tsum) * (niterations - citer)
        tlast = time.time()

        # Report the current best walker (highest log-probability at this step)
        maxlnprob = np.argmax(lnp)
        bestpos = pos[maxlnprob, :]

        params = utilfuncs.split_parameters(bestpos, mod_pars[0])

        redchisqr = utilfuncs.reduced_chisqr(mod_pars, params, photo_data, rv_data)

        utilfuncs.iterprint(mod_pars, params, lnp[maxlnprob], redchisqr, citer / niterations, tleft)
        utilfuncs.report_as_input(mod_pars, params, fname)

    # Remove 'burn in' region
    print('Burning in; creating sampler chain...')

    burnin = int(0.5 * niterations)
    samples = sampler.chain[:, burnin:, :].reshape((-1, ndim))

    # Compute the 16th/50th/84th percentiles of each parameter and express the
    # result as (median, upper uncertainty, lower uncertainty).
    print('Computing quantiles; mapping results...')

    results = [(v[1], v[2] - v[1], v[1] - v[0])
               for v in zip(*np.percentile(samples, [16, 50, 84], axis=0))]

    # Produce final model and save the values
    print('Saving final results...')

    utilfuncs.mcmc_report_out(mod_pars, results, fname)
    utilfuncs.plot_out(params, fname, sampler, samples, ndim)
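
generate passes a module-level lnprob to emcee's EnsembleSampler, which evaluates it as lnprob(theta, mod_pars, photo_data, rv_data) for every proposed walker position; that function is not included in these examples. The sketch below shows the general shape such a log-probability function takes; the flat prior and the constant-flux Gaussian likelihood are assumptions made for illustration, not this project's actual model.

import numpy as np

def lnprior(theta):
    # Hypothetical flat prior: reject wildly unphysical parameter values.
    if np.all(np.abs(theta) < 1.0e6):
        return 0.0
    return -np.inf

def lnlike(theta, mod_pars, photo_data, rv_data):
    # Hypothetical Gaussian log-likelihood against the photometric data; a real
    # implementation would build the model light curve and radial velocities from theta.
    model = np.ones_like(photo_data[1])
    return -0.5 * np.sum(((photo_data[1] - model) / photo_data[2]) ** 2)

def lnprob(theta, mod_pars, photo_data, rv_data):
    # Log posterior = log prior + log likelihood; -inf makes emcee reject the step.
    lp = lnprior(theta)
    if not np.isfinite(lp):
        return -np.inf
    return lp + lnlike(theta, mod_pars, photo_data, rv_data)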