from numpy import sqrt, exp, array, argsort
from inference.mcmc import HamiltonianChain


def test_hamiltonian_chain_advance_no_gradient():
    posterior = ToroidalGaussian()
    chain = HamiltonianChain(posterior=posterior, start=[1, 0.1, 0.1])
    first_n = chain.n
    steps = 10
    chain.advance(steps)

    assert chain.n == first_n + steps
    chain.burn = 0
    assert len(chain.get_parameter(0)) == chain.n
    assert len(chain.get_parameter(1)) == chain.n
    assert len(chain.get_parameter(2)) == chain.n
    assert len(chain.probs) == chain.n


def test_hamiltonian_chain_take_step():
    posterior = ToroidalGaussian()
    chain = HamiltonianChain(posterior=posterior,
                             grad=posterior.gradient,
                             start=[1, 0.1, 0.1])
    first_n = chain.n

    chain.take_step()

    assert chain.n == first_n + 1
    chain.burn = 0
    assert len(chain.get_parameter(0)) == chain.n
    assert len(chain.get_parameter(1)) == chain.n
    assert len(chain.get_parameter(2)) == chain.n
    assert len(chain.probs) == chain.n

# create an instance of our posterior class
posterior = ToroidalGaussian()

# create the chain object
chain = HamiltonianChain(posterior=posterior,
                         grad=posterior.gradient,
                         start=[1, 0.1, 0.1])

# advance the chain to generate the sample
chain.advance(6000)

# choose how many samples will be thrown away from the start
# of the chain as 'burn-in'
chain.burn = 2000
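# (with 6000 steps taken and burn = 2000, the plotting and get_* methods below
#  operate on the remaining 4000 samples)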

chain.matrix_plot(filename='hmc_matrix_plot.png')
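
# the series of samples for each parameter can also be checked with a trace
# plot; this assumes the chain's trace_plot method, which accepts a filename
# keyword in the same way as matrix_plot
chain.trace_plot(filename='hmc_trace_plot.png')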

# extract sample and probability data from the chain
probs = chain.get_probabilities()
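# converting the log-probabilities to probabilities relative to the maximum
# gives values in (0, 1] that can be used directly as point colours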
colors = exp(probs - max(probs))
xs, ys, zs = [chain.get_parameter(i) for i in [0, 1, 2]]
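
# as a quick sanity check on the sample: the toroidal posterior is symmetric
# about the z-axis and centred on the x-y plane, so all three sample means
# should be close to zero
from numpy import mean
print(mean(xs), mean(ys), mean(zs))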

import plotly.graph_objects as go
from plotly import offline

fig = go.Figure(data=[
    go.Scatter3d(x=xs,
                 y=ys,
                 z=zs,
                 mode='markers',
                 marker=dict(color=colors))
])
# open the interactive 3D scatter plot in the browser
offline.plot(fig)


# class defining the toroidal-Gaussian posterior used in these demos
class ToroidalGaussian:
    def gradient(self, theta):
        # analytic gradient of the log-posterior with respect to (x, y, z)
        x, y, z = theta
        R = sqrt(x**2 + y**2)
        K = 1 - self.R0 / R
        g = array([K * x, K * y, z])
        return -g / self.w2
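
    # the methods below are a minimal sketch (assumed, not taken from the
    # source, where only the gradient method appears): an initialiser setting
    # the torus major radius R0 and the squared shell width w2, plus a
    # __call__ returning the log-posterior, chosen to be consistent with the
    # gradient above (Gaussian in the distance from the circle of radius R0
    # in the x-y plane)
    def __init__(self):
        self.R0 = 1.0                    # major radius of the torus (assumed value)
        self.w2 = (self.R0 / 10.0) ** 2  # squared shell width (assumed value)

    def __call__(self, theta):
        x, y, z = theta
        r_sqr = z**2 + (sqrt(x**2 + y**2) - self.R0) ** 2
        return -0.5 * r_sqr / self.w2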


posterior = ToroidalGaussian()

from inference.mcmc import HamiltonianChain

hmc = HamiltonianChain(posterior=posterior,
                       grad=posterior.gradient,
                       start=[1, 0.1, 0.1])

hmc.advance(6000)
hmc.burn = 1000

import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D

fig = plt.figure(figsize=(5, 4))
ax = fig.add_subplot(111, projection='3d')
ax.set_xticks([-1, -0.5, 0., 0.5, 1.])
ax.set_yticks([-1, -0.5, 0., 0.5, 1.])
ax.set_zticks([-1, -0.5, 0., 0.5, 1.])
# ax.set_title('Hamiltonian Monte-Carlo')
L = 1.1
ax.set_xlim([-L, L])
ax.set_ylim([-L, L])
ax.set_zlim([-L, L])
probs = array(hmc.get_probabilities())
inds = argsort(probs)
colors = exp(probs - max(probs))
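
# one way to finish the figure from here: draw the sample as a 3D scatter,
# plotting points in order of increasing probability (the argsort indices
# above) so the high-probability core is drawn on top; the parameter arrays
# are extracted in the same way as in the earlier plotly example
xs, ys, zs = [array(hmc.get_parameter(i)) for i in [0, 1, 2]]
ax.scatter(xs[inds], ys[inds], zs[inds], c=colors[inds], marker='.')
plt.tight_layout()
plt.show()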