Example #1
File: main.py  Project: dfm/flicker
import numpy as np
import emcee
import h5py

# load_data, generate_samples and lnprob are defined elsewhere in the project


def MCMC(whichx, nsamp, fname, nd, bigdata, burnin=500, run=500):

    rho_pars = [-2., 6., .0065]
    logg_pars = [-1.850, 7., .0065]
    pars_init = logg_pars
    if whichx == "rho":
        pars_init = rho_pars

    x, y, xerr, yerr = load_data(whichx, nd=nd, bigdata=True)

    # format data and generate samples
    obs = np.vstack((x, y))
    u = np.vstack((xerr, yerr))
    up = np.vstack((xerr, yerr))
    um = np.vstack((xerr*.5, yerr*.5))
#     s = generate_samples_log(obs, up, um, nsamp) # FIXME
    s = generate_samples(obs, u, nsamp) # FIXME
#     if nsamp == 1:
#         s[0, :, :] = x
#         s[1, :, :] = y
#     print np.shape(s)
#     assert 0

    # set up and run emcee
    ndim, nwalkers = len(pars_init), 32
    pos = [pars_init + 1e-4*np.random.randn(ndim) for i in range(nwalkers)]
    sampler = emcee.EnsembleSampler(nwalkers, ndim, lnprob,
                                    args=(s, obs, u))
    print "burning in..."
    pos, _, _, = sampler.run_mcmc(pos, burnin)
    sampler.reset()
    print "production run..."
    sampler.run_mcmc(pos, run)
    # flatten the chains, discarding the first 50 production steps
    samp = sampler.chain[:, 50:, :].reshape((-1, ndim))
    m, c, sig = map(lambda v: (v[1], v[2] - v[1], v[1] - v[0]),
                    zip(*np.percentile(samp, [16, 50, 84], axis=0)))
    pars = [m[0], c[0], sig[0]]

    # save samples
    f = h5py.File("%s_samples_%s.h5" % (whichx, fname), "w")
    data = f.create_dataset("samples", np.shape(samp))
    data[:, 0] = samp[:, 0]
    data[:, 1] = samp[:, 1]
    data[:, 2] = samp[:, 2]
    f.close()
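generate_samples is not shown in any of these snippets. Here is a minimal sketch of what such a helper could look like, assuming it simply draws nsamp Gaussian perturbations of each observation using the quoted 1-sigma uncertainties; the (2, nsamp, ndata) shape convention is an assumption, not taken from the source.

import numpy as np


def generate_samples(obs, u, nsamp):
    """Draw nsamp Gaussian realisations of the observations.

    obs and u are (2, ndata) arrays of observed values and 1-sigma
    uncertainties; the result has shape (2, nsamp, ndata). This is only a
    plausible stand-in for the helper used in main.py.
    """
    ndim, ndata = np.shape(obs)
    return obs[:, None, :] + u[:, None, :] * np.random.randn(ndim, nsamp, ndata)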
Example #2
File: main.py  Project: RuthAngus/flicker
def MCMC(whichx, nsamp, fname, nd, bigdata, burnin=500, run=500):

    rho_pars = [-2., 6., .0065]
    logg_pars = [-1.850, 7., .0065]
    pars_init = logg_pars
    if whichx == "rho":
        pars_init = rho_pars

    x, y, xerr, yerr = load_data(whichx, nd=nd, bigdata=True)

    # format data and generate samples
    obs = np.vstack((x, y))
    u = np.vstack((xerr, yerr))
    up = np.vstack((xerr, yerr))
    um = np.vstack((xerr * .5, yerr * .5))
    #     s = generate_samples_log(obs, up, um, nsamp) # FIXME
    s = generate_samples(obs, u, nsamp)  # FIXME
    #     if nsamp == 1:
    #         s[0, :, :] = x
    #         s[1, :, :] = y
    #     print np.shape(s)
    #     assert 0

    # set up and run emcee
    ndim, nwalkers = len(pars_init), 32
    pos = [pars_init + 1e-4 * np.random.randn(ndim) for i in range(nwalkers)]
    sampler = emcee.EnsembleSampler(nwalkers, ndim, lnprob, args=(s, obs, u))
    print "burning in..."
    pos, _, _, = sampler.run_mcmc(pos, burnin)
    sampler.reset()
    print "production run..."
    sampler.run_mcmc(pos, run)
    samp = sampler.chain[:, 50:, :].reshape((-1, ndim))
    m, c, sig = map(lambda v: (v[1], v[2] - v[1], v[1] - v[0]),
                    zip(*np.percentile(samp, [16, 50, 84], axis=0)))
    pars = [m[0], c[0], sig[0]]

    # save samples
    f = h5py.File("%s_samples_%s.h5" % (whichx, fname), "w")
    data = f.create_dataset("samples", np.shape(samp))
    data[:, 0] = samp[:, 0]
    data[:, 1] = samp[:, 1]
    data[:, 2] = samp[:, 2]
    f.close()
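Neither snippet shows how the saved samples are read back. With h5py the file written above can be re-read as follows; the file name is an illustrative placeholder following the "%s_samples_%s.h5" pattern.

import h5py

# illustrative placeholder name following the "%s_samples_%s.h5" pattern
with h5py.File("rho_samples_test.h5", "r") as fh:
    samples = fh["samples"][...]   # array of shape (n_samples, 3)
print(samples.shape)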
Example #3

import numpy as np
import emcee

# generate_samples, lnlikeH and lnprior are defined elsewhere in the project


def lnprob(pars, samples, obs, u):
    return lnlikeH(pars, samples, obs, u) + lnprior(pars)


if __name__ == "__main__":

    # load data
    # x = f, y = rho, z = teff
    x, xerr, y, yerr, z, zerr = np.genfromtxt("data/log.dat").T

    obs = np.vstack((x, y))
    u = np.vstack((xerr, yerr))
    nsamp = 3
    s = generate_samples(obs, u, nsamp)

    pars_init = [-1.850, 5.413, .065]

    ndim, nwalkers = len(pars_init), 32
    pos = [pars_init + 1e-4 * np.random.randn(ndim) for i in range(nwalkers)]
    sampler = emcee.EnsembleSampler(nwalkers, ndim, lnprob, args=(s, obs, u))

    print "burning in..."
    pos, _, _, = sampler.run_mcmc(pos, 500)
    sampler.reset()
    print "production run..."
    sampler.run_mcmc(pos, 1000)
    samp = sampler.chain[:, 50:, :].reshape((-1, ndim))
    m, c, lnf = map(lambda v: (v[1], v[2] - v[1], v[1] - v[0]),
                    zip(*np.percentile(samp, [16, 50, 84], axis=0)))
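The lnlikeH and lnprior called by lnprob are not shown in the listing. Purely as an illustration of the kind of functions the sampler expects, here is a hedged sketch: a straight line y = m*x + c with Gaussian intrinsic scatter, whose likelihood is averaged over the Monte Carlo realisations of each data point. This is an assumed form, not the authors' actual implementation.

import numpy as np


def lnprior(pars):
    # improper flat prior, as in example #4 below
    return 0.


def lnlikeH(pars, samples, obs, u):
    """Hypothetical hierarchical likelihood for a line y = m*x + c with
    Gaussian scatter sig, averaged over the nsamp realisations of each datum.
    samples is assumed to have shape (2, nsamp, ndata); obs and u are accepted
    only to match the call signature used above."""
    m, c, sig = pars
    if sig <= 0:
        return -np.inf
    xs, ys = samples[0], samples[1]              # each (nsamp, ndata)
    var = sig ** 2
    resid = ys - (m * xs + c)
    # per-realisation Gaussian log-likelihood for every data point
    ll = -0.5 * (resid ** 2 / var + np.log(2 * np.pi * var))
    # average the likelihood (not the log-likelihood) over realisations,
    # then sum the per-datum logs
    return np.sum(np.logaddexp.reduce(ll, axis=0) - np.log(ll.shape[0]))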
Example #4
def lnprior(pars):
    return 0.

def lnprob(pars, samples, obs, u):
    return lnlikeH(pars, samples, obs, u) + lnprior(pars)

if __name__ == "__main__":

    # load data
    # x = f, y = rho, z = teff
    x, xerr, y, yerr, z, zerr = np.genfromtxt("data/log.dat").T

    obs = np.vstack((x, y))
    u = np.vstack((xerr, yerr))
    nsamp = 3
    s = generate_samples(obs, u, nsamp)

    pars_init = [-1.850, 5.413, .065]

    ndim, nwalkers = len(pars_init), 32
    pos = [pars_init + 1e-4*np.random.randn(ndim) for i in range(nwalkers)]
    sampler = emcee.EnsembleSampler(nwalkers, ndim, lnprob,
                                    args=(s, obs, u))

    print "burning in..."
    pos, _, _, = sampler.run_mcmc(pos, 500)
    sampler.reset()
    print "production run..."
    sampler.run_mcmc(pos, 1000)
    samp = sampler.chain[:, 50:, :].reshape((-1, ndim))
    m, c, lnf = map(lambda v: (v[1], v[2] - v[1], v[1] - v[0]),
                    zip(*np.percentile(samp, [16, 50, 84], axis=0)))
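The lnprior above is an improper flat prior (it always returns 0). If bounds are wanted, a common emcee pattern is a top-hat prior; the limits below are purely illustrative and not taken from the source.

import numpy as np


def lnprior(pars):
    """Top-hat prior: flat inside arbitrary illustrative bounds, -inf outside."""
    m, c, sig = pars
    if -10. < m < 10. and -20. < c < 20. and 0. < sig < 1.:
        return 0.
    return -np.inf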
Example #5
import numpy as np
import emcee
import h5py


def MCMC(whichx, nsamp, fname, nd, extra, f, bigdata, burnin=500, run=1000):
    """
    nsamp (int) = number of samples.
    whichx (str) = logg or rho.
    fname (str) = the name for saving all output.
    nd (int) = number of data points (for truncation).
    If this is zero, all the data are used.
    extra (bool) = if True, use the four-parameter initial guess.
    f = passed straight through to lnprob.
    bigdata (bool) = which data file to use.
    """

    # set initial parameters
    if extra:
        rho_pars = [-1.793214679, 5.34215688, 0.02334097, .0002600777]
        logg_pars = [-1.02143776, 5.69156647, .24239756, .049233887]
    else:
        rho_pars = [-1.69293833, 5.1408906, .0065]
        logg_pars = [-1.05043614, 5.66819525, .0065]
    pars_init = logg_pars
    if whichx == "rho":
        pars_init = rho_pars

    # load the data
    x, y, xerr, yerr = load_data(whichx, nd=nd, bigdata=False)

    # format data and generate samples
    obs = np.vstack((x, y))
    u = np.vstack((xerr, yerr))
    up = np.vstack((xerr, yerr))
    um = np.vstack((xerr*.5, yerr*.5))
    s = generate_samples(obs, u, nsamp)

    # set up and run emcee
    ndim, nwalkers = len(pars_init), 32
    pos = [pars_init + 1e-4*np.random.randn(ndim) for i in range(nwalkers)]
    sampler = emcee.EnsembleSampler(nwalkers, ndim, lnprob,
                                    args=(s, obs, u, extra, f))
    print "burning in..."
    pos, _, _, _ = sampler.run_mcmc(pos, burnin)
    sampler.reset()
    print "production run..."
    sampler.run_mcmc(pos, run)

    # collect the per-sample log-likelihoods stored in the emcee blobs
    lls = sampler.blobs
    # blobs are indexed (step, walker); transpose so the flattened values
    # line up with the walker-major order of the flattened chain
    flat_lls = np.array(lls).T.reshape(np.shape(lls)[0] * np.shape(lls)[1])
    samp = np.vstack((sampler.chain[:, :, :].reshape(-1, ndim).T, flat_lls)).T

    # debugging output: check the first column of samples for NaNs/infs
    sa = samp.T[0]
    print(type(sa))
    print(np.isfinite(sa))
    print(sa)
    print(np.shape(sa), np.shape(sa[np.isfinite(sa)]))
    # save samples (note: the h5py file handle below shadows the argument
    # `f`, which has already been handed to the sampler)
    f = h5py.File("%s_samples_%s.h5" % (whichx, fname), "w")
    data = f.create_dataset("samples", np.shape(samp))
    data[:, 0] = samp[:, 0]
    data[:, 1] = samp[:, 1]
    data[:, 2] = samp[:, 2]
    data[:, 3] = samp[:, 3]
    print("samp shape =", np.shape(samp), np.shape(data))
    if extra:
        data[:, 4] = samp[:, 4]
    f.close()
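Examples #5 and #6 unpack four values from run_mcmc and read sampler.blobs, which requires lnprob to return a second value (an emcee "blob") alongside the log-posterior. A minimal sketch of such an lnprob, assuming the blob is just the log-likelihood and reusing the lnlikeH/lnprior sketched earlier; the extra and f arguments are accepted only to match the args tuple above.

def lnprob(pars, samples, obs, u, extra, f):
    # returning a tuple makes emcee store the second element as a blob,
    # which is what sampler.blobs collects after the run
    ll = lnlikeH(pars, samples, obs, u)
    return ll + lnprior(pars), ll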
Example #6
def MCMC(whichx, nsamp, fname, nd, extra, f, bigdata, burnin=500, run=1000):
    """
    nsamp (int) = number of samples.
    whichx (str) = logg or rho.
    fname (str) = the name for saving all output.
    nd (int) = number of data points (for truncation).
    If this is zero, all the data are used.
    extra (bool) = if True, use the four-parameter initial guess.
    f = passed straight through to lnprob.
    bigdata (bool) = which data file to use.
    """

    # set initial parameters
    if extra:
        rho_pars = [-1.793214679, 5.34215688, 0.02334097, .0002600777]
        logg_pars = [-1.02143776, 5.69156647, .24239756, .049233887]
    else:
        rho_pars = [-1.69293833, 5.1408906, .0065]
        logg_pars = [-1.05043614, 5.66819525, .0065]
    pars_init = logg_pars
    if whichx == "rho":
        pars_init = rho_pars

    # load the data
    x, y, xerr, yerr = load_data(whichx, nd=nd, bigdata=False)

    # format data and generate samples
    obs = np.vstack((x, y))
    u = np.vstack((xerr, yerr))
    up = np.vstack((xerr, yerr))
    um = np.vstack((xerr * .5, yerr * .5))
    s = generate_samples(obs, u, nsamp)

    # set up and run emcee
    ndim, nwalkers = len(pars_init), 32
    pos = [pars_init + 1e-4 * np.random.randn(ndim) for i in range(nwalkers)]
    sampler = emcee.EnsembleSampler(nwalkers,
                                    ndim,
                                    lnprob,
                                    args=(s, obs, u, extra, f))
    print "burning in..."
    pos, _, _, _ = sampler.run_mcmc(pos, burnin)
    sampler.reset()
    print "production run..."
    sampler.run_mcmc(pos, run)

    # collect the per-sample log-likelihoods stored in the emcee blobs
    lls = sampler.blobs
    # blobs are indexed (step, walker); transpose so the flattened values
    # line up with the walker-major order of the flattened chain
    flat_lls = np.array(lls).T.reshape(np.shape(lls)[0] * np.shape(lls)[1])
    samp = np.vstack((sampler.chain[:, :, :].reshape(-1, ndim).T, flat_lls)).T

    # debugging output: check the first column of samples for NaNs/infs
    sa = samp.T[0]
    print(type(sa))
    print(np.isfinite(sa))
    print(sa)
    print(np.shape(sa), np.shape(sa[np.isfinite(sa)]))
    # save samples (note: the h5py file handle below shadows the argument
    # `f`, which has already been handed to the sampler)
    f = h5py.File("%s_samples_%s.h5" % (whichx, fname), "w")
    data = f.create_dataset("samples", np.shape(samp))
    data[:, 0] = samp[:, 0]
    data[:, 1] = samp[:, 1]
    data[:, 2] = samp[:, 2]
    data[:, 3] = samp[:, 3]
    print("samp shape =", np.shape(samp), np.shape(data))
    if extra:
        data[:, 4] = samp[:, 4]
    f.close()
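Finally, a hedged usage example based only on the docstring above: whichx, fname and the value of f are placeholders (f is simply forwarded to lnprob), and the output lands in a file following the "%s_samples_%s.h5" pattern.

if __name__ == "__main__":
    # placeholder arguments; f is only forwarded to lnprob
    MCMC("rho", nsamp=100, fname="test", nd=0, extra=False, f=None,
         bigdata=True, burnin=500, run=1000)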