Example #1
def generate_figure_samples(samples_per_frame, n_frames, burnin=int(1e4)):
    """ Generates the samples for the figure

    :param samples_per_frame: number of sample steps between each frame
    :param n_frames: number of frames to draw
    :param burnin: number of burn-in sampling steps run before the comparison
    :returns: frames for each sampler, sampler names, and the sample-step index of each frame
    :rtype: tuple
    """
    n_samples = samples_per_frame * n_frames
    ndims = 36
    nbasis = 72

    rand_val = rand(ndims, nbasis / 2, density=0.25)
    W = np.concatenate([rand_val.toarray(), -rand_val.toarray()], axis=1)
    logalpha = np.random.randn(nbasis, 1)
    poe = ProductOfT(nbatch=1, W=W, logalpha=logalpha)

    ## NUTS uses a different number of grad evals for each update step!!
    ## makes it very hard to compare against others w/ same number of update steps
    # # NUTS
    # print "NUTS"
    # nuts_init = poe.Xinit[:, 0]
    # nuts_samples = nuts6(poe.reset(), n_samples, nuts_burnin, nuts_init)[0]
    # nuts_frames = [nuts_samples[f_idx * samples_per_frame, :] for f_idx in xrange(0, n_frames)]
    # x_init = nuts_samples[0, :].reshape(ndims, 1)

    ## burnin
    print "MJHMC burnin"
    x_init = poe.Xinit #[:, [0]]
    mjhmc = MarkovJumpHMC(distribution=poe.reset(), **mjhmc_params)
    mjhmc.state = HMCState(x_init.copy(), mjhmc)
    mjhmc_samples = mjhmc.sample(burnin)
    print mjhmc_samples.shape
    x_init = mjhmc_samples[:, [0]]

    # control HMC
    print "Control"
    hmc = ControlHMC(distribution=poe.reset(), **control_params)
    hmc.state = HMCState(x_init.copy(), hmc)
    hmc_samples = hmc.sample(n_samples)
    hmc_frames = [hmc_samples[:, f_idx * samples_per_frame].copy() for f_idx in xrange(0, n_frames)]

    # MJHMC
    print "MJHMC"
    mjhmc = MarkovJumpHMC(distribution=poe.reset(), resample=False, **mjhmc_params)
    mjhmc.state = HMCState(x_init.copy(), mjhmc)
    mjhmc_samples = mjhmc.sample(n_samples)
    mjhmc_frames = [mjhmc_samples[:, f_idx * samples_per_frame].copy() for f_idx in xrange(0, n_frames)]

    # diagnostic output: compare the samplers' transition counts
    print mjhmc.r_count, hmc.r_count
    print mjhmc.l_count, hmc.l_count
    print mjhmc.f_count, hmc.f_count
    print mjhmc.fl_count, hmc.fl_count


    frames = [mjhmc_frames, hmc_frames]
    names = ['MJHMC', 'ControlHMC']
    frame_grads = [f_idx * samples_per_frame for f_idx in xrange(0, n_frames)]
    return frames, names, frame_grads
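The frame extraction above just takes every samples_per_frame-th column of a (ndims, n_samples) sample array. Below is a minimal, self-contained sketch of that indexing with a dummy array standing in for the samplers; it is illustrative only and nothing in it comes from the library itself.

import numpy as np

# Stand-in for sampler.sample(n_samples): an array of shape (ndims, n_samples)
ndims, samples_per_frame, n_frames = 36, 100, 5
n_samples = samples_per_frame * n_frames
dummy_samples = np.random.randn(ndims, n_samples)

# One frame every samples_per_frame columns, plus the sample-step index of each frame
frames = [dummy_samples[:, f_idx * samples_per_frame].copy() for f_idx in range(n_frames)]
frame_grads = [f_idx * samples_per_frame for f_idx in range(n_frames)]

assert len(frames) == n_frames and frames[0].shape == (ndims,)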
Example #2
def hist_2d(distribution, nsamples, **kwargs):
    """
    Plots a 2d KDE joint plot of samples drawn from distribution
    """
    distr = distribution(ndims=2)
    sampler = MarkovJumpHMC(distr.Xinit, distr.E, distr.dEdX, **kwargs)
    samples = sampler.sample(nsamples)

    with sns.axes_style("white"):
        sns.jointplot(samples[0], samples[1], kind="kde", stat_func=None)
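A hedged usage sketch for this older positional signature, which builds the distribution itself from an uninitialized class. TestGaussian is the distribution class used in the later examples; its import path, and the assumption that it accepts ndims=2 and that extra keyword arguments are forwarded to MarkovJumpHMC, are not shown in these snippets.

import matplotlib.pyplot as plt

# Hypothetical call; hist_2d and TestGaussian are assumed to be importable
# from the surrounding project (module paths are not shown above).
hist_2d(TestGaussian, 5000, epsilon=1)
plt.show()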
Example #3
def hist_2d(distr, nsamples, **kwargs):
    """
    Plots a 2d KDE joint plot of samples drawn from distribution

    Args:
     distr: Distribution object
     nsamples: number of samples to use to generate plot
    """
    sampler = MarkovJumpHMC(distribution=distr, **kwargs)
    samples = sampler.sample(nsamples)

    with sns.axes_style("white"):
        g = sns.jointplot(samples[0], samples[1], kind='kde', stat_func=None)
    return g
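Because this version returns the seaborn JointGrid, the caller can relabel or save the figure. A brief sketch follows; constructing the distribution from TestGaussian with ndims=2 is an assumption, mirroring how the other examples build their distributions.

# Assumed: TestGaussian (seen in the next example) is importable and supports ndims=2.
distr = TestGaussian(ndims=2)
g = hist_2d(distr, nsamples=5000, epsilon=1)
g.set_axis_labels("$x_1$", "$x_2$")
g.savefig("hist_2d_kde.png")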
Example #4
def gauss_2d(nsamples=1000):
    """
    Another simple test plot:
    a 1d Gaussian is sampled with each sampler and the two sample streams
    are visualized as a joint 2d Gaussian
    """
    gaussian = TestGaussian(ndims=1)
    control = HMCBase(distribution=gaussian)
    experimental = MarkovJumpHMC(distribution=gaussian, resample=False)

    with sns.axes_style("white"):
        sns.jointplot(control.sample(nsamples)[0],
                      experimental.sample(nsamples)[0],
                      kind='hex',
                      stat_func=None)
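Since the two sample streams are drawn independently from the same 1d Gaussian, the hex plot should look roughly isotropic when both samplers mix well. A minimal usage sketch (pyplot is imported here because the snippets above do not show their imports):

import matplotlib.pyplot as plt

gauss_2d(nsamples=2000)
plt.show()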
Example #5
def hist_1d(distr, nsamples=1000, nbins=250, control=True, resample=True):
    """
    Plots a 1d histogram from each sampler;
    distr is an (uninitialized) class from distributions
    """
    distribution = distr(ndims=1)
    control_smp = HMCBase(distribution=distribution, epsilon=1)
    experimental_smp = MarkovJumpHMC(distribution=distribution, resample=resample, epsilon=1)

    if control:
        plt.hist(control_smp.sample(nsamples)[0],
                 nbins,
                 normed=True,
                 label="Standard HMCBase",
                 alpha=.5)

    plt.hist(experimental_smp.sample(nsamples)[0],
             nbins,
             normed=True,
             label="Continuous-time HMCBase",
             alpha=.5)
    plt.legend()
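A usage sketch for the histogram comparison, with the analytic standard-normal density overlaid for reference. TestGaussian is carried over from the earlier example and its import path is not shown; the overlay assumes TestGaussian(ndims=1) is a standard 1d Gaussian.

import numpy as np
import matplotlib.pyplot as plt

# Hypothetical call; hist_1d expects the uninitialized distribution class.
hist_1d(TestGaussian, nsamples=20000, nbins=100)

# Overlay the analytic standard-normal density for visual comparison
# (assumes TestGaussian(ndims=1) has zero mean and unit variance).
xs = np.linspace(-4, 4, 200)
plt.plot(xs, np.exp(-xs ** 2 / 2) / np.sqrt(2 * np.pi), 'k--', label="analytic")
plt.legend()
plt.show()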