def test_vector_version(self):
    """Evaluating manual_grad on a vector of points must agree, element by
    element, with evaluating it on each scalar point separately."""
    points = np.array([1.0, 3.0, 5.0])
    vectorized = manual_grad(1., 3., points)
    assert_almost_equal(vectorized[-1], manual_grad(1., 3., 5.))
    assert_almost_equal(vectorized[0], manual_grad(1., 3., 1.))
# --- Example #2 (score: 0) ---
 def test_vector_version(self):
     """The vectorized call to manual_grad matches the scalar calls at the
     first and last sample points."""
     batched = manual_grad(1., 3., np.array([1.0, 3.0, 5.0]))
     assert_almost_equal(batched[-1], manual_grad(1., 3., 5.))
     assert_almost_equal(batched[0], manual_grad(1., 3., 1.))
# --- Example #3 (score: 0) ---
 def test_manual_gradient(self):
     """manual_grad's two components agree with the independently computed
     per-coordinate gradients of the log density."""
     gradient = manual_grad(1., 3., 3.)
     assert_almost_equal(grad_the_log_density_x(1., 3., 3.), gradient[0])
     assert_almost_equal(grad_the_log_density_y(1., 3., 3.), gradient[1])
# --- Example #4 (score: 0) ---
 def grad_log_pob(t):
     """Gradient of the log posterior at t: the per-sample likelihood
     gradients over X summed down axis 0, plus the prior gradient."""
     likelihood_term = np.sum(manual_grad(t[0], t[1], X), axis=0)
     return likelihood_term + grad_log_prior(t)
# --- Example #5 (score: 0) ---
def grad_log_pob(theta):
    """Gradient of the log likelihood for each parameter row in theta.

    For every row t, the per-sample gradients of manual_grad over the data
    set X are summed down axis 0; the rows of the result are stacked into
    a single array. (No prior term is added here.)
    """
    # Comprehension replaces the original append loop (same order, same values).
    return np.array([np.sum(manual_grad(t[0], t[1], X), axis=0) for t in theta])
# --- Example #6 (score: 0) ---
 def grad_log_pob(t):
     """Log-posterior gradient at t: summed per-sample likelihood gradients
     over X plus the gradient of the log prior."""
     data_term = np.sum(manual_grad(t[0], t[1], X), axis=0)
     prior_term = grad_log_prior(t)
     return data_term + prior_term
# --- Example #7 (score: 0) ---
def grad_log_pob(theta):
    """Gradient of the log likelihood for each parameter row in theta.

    Each row t contributes np.sum(manual_grad(t[0], t[1], X), axis=0);
    the per-row results are stacked into one array. No prior term here.
    """
    # Comprehension replaces the manual append loop (identical output).
    return np.array([np.sum(manual_grad(t[0], t[1], X), axis=0) for t in theta])
from sgld_test.likelihoods import gen_X
import numpy as np
import matplotlib.pyplot as plt

# Visualize the log-posterior gradient field over the square [-2, 2)^2.
theta1 = np.arange(-2, 2, 0.025)
theta2 = np.arange(-2, 2, 0.025)

grid_dimension_size = len(theta1)

theta1, theta2 = np.meshgrid(theta1, theta2)

# Output grids for the two gradient components. Every cell is overwritten in
# the loop below; the original copied theta1 into both, which only supplied
# the shape and misleadingly suggested the values mattered.
D_theta1 = np.zeros_like(theta1)
D_theta2 = np.zeros_like(theta1)

sample = gen_X(400)

for i in range(grid_dimension_size):
    for j in range(grid_dimension_size):
        th = np.array([theta1[i, j], theta2[i, j]])

        # subsample = np.random.choice(sample, 40)
        # Full-data gradient of the log posterior at th: summed per-sample
        # likelihood gradients plus the prior gradient.
        stoch_grad_log_lik = np.sum(manual_grad(th[0], th[1], sample),
                                    axis=0) + grad_log_prior(th)

        D_theta1[i, j] = stoch_grad_log_lik[0]
        D_theta2[i, j] = stoch_grad_log_lik[1]

plt.figure()
CS = plt.streamplot(theta1, theta2, D_theta1, D_theta2, density=[0.5, 1])
plt.show()
# --- Example #9 (score: 0) ---
def grad_log_lik(t):
    # Gradient of the log posterior at t: per-sample likelihood gradients
    # over the data X summed down axis 0, minus Gaussian-prior terms.
    # NOTE(review): -t[1]/SIGMA_2 and -t[0]/SIGMA_1 are scalars, so each is
    # broadcast-subtracted from BOTH components of the summed gradient
    # vector. If the prior was meant to act per-component (i.e. subtract
    # [t[0]/SIGMA_1, t[1]/SIGMA_2] element-wise), this is a bug — confirm
    # against grad_log_prior used elsewhere in this file.
    a = np.sum(manual_grad(t[0],t[1],X),axis=0)  - t[1]/SIGMA_2 -t[0]/SIGMA_1
    return a
 def test_manual_gradient(self):
     """Each component of manual_grad matches the corresponding hand-derived
     partial derivative of the log density at (1, 3, 3)."""
     g = manual_grad(1., 3., 3.)
     assert_almost_equal(grad_the_log_density_x(1., 3., 3.), g[0])
     assert_almost_equal(grad_the_log_density_y(1., 3., 3.), g[1])
import matplotlib.pyplot as plt


# Visualize the log-posterior gradient field over the square [-2, 2)^2.
theta1 = np.arange(-2, 2, 0.025)
theta2 = np.arange(-2, 2, 0.025)

grid_dimension_size = len(theta1)

theta1, theta2 = np.meshgrid(theta1, theta2)

# Output grids for the two gradient components. Every cell is overwritten
# below; the original copied theta1 into both, which only supplied the
# shape and read as a copy-paste mistake.
D_theta1 = np.zeros_like(theta1)
D_theta2 = np.zeros_like(theta1)

sample = gen_X(400)

for i in range(grid_dimension_size):
    for j in range(grid_dimension_size):
        th = np.array([theta1[i, j], theta2[i, j]])

        # subsample = np.random.choice(sample, 40)
        # Full-data gradient of the log posterior at th.
        stoch_grad_log_lik = np.sum(manual_grad(th[0], th[1], sample), axis=0)  + grad_log_prior(th)

        D_theta1[i, j] = stoch_grad_log_lik[0]
        D_theta2[i, j] = stoch_grad_log_lik[1]

plt.figure()
CS = plt.streamplot(theta1, theta2, D_theta1, D_theta2, density=[0.5, 1])
plt.show()