Example #1
 def test_to_variable(self):
     print "Test Third-Order Time-Varying Interactions."
     # Repeat test for different numbers of neurons
     for N in 2**numpy.arange(2, 4):
         print(N)
         # Compute dimensionality of natural-parameter distribution
         D = transforms.compute_D(N, 3)
         # Create a regular set of theta parameters for each timestep
         theta = numpy.zeros((self.T, D))
         theta[:,:N] = self.theta_base
         theta[:,N:] = -1.
         # Add time-varying components for some neurons
         numpy.random.seed(self.wave_seed)
         n_random = numpy.random.randint(0, N // 2)
         cells = random.sample(range(N), n_random)
         for i in range(n_random):
             # Draw random phase, amplitude and frequency
             phi = numpy.random.uniform(0, 2 * numpy.pi)
             A = numpy.random.uniform(1, 2)
             f = 1 / (numpy.random.uniform(self.T / 5., 5 * self.T) * 1e-3)
             idx = cells[i]
             theta[:,idx] = self.theta_base + \
                 self.wave(A, f, phi, self.T * 1e-3)
         # Add time-varying components for some interactions
         n_random = numpy.random.randint(0, D - N)
         interactions = random.sample(range(N, D), n_random)
         for i in range(n_random):
             # Draw random phase, amplitude and frequency
             phi = numpy.random.uniform(0, 2 * numpy.pi)
             A = numpy.random.uniform(1, 2)
             f = 1 / (numpy.random.uniform(self.T / 5., 5 * self.T) * 1e-3)
             idx = interactions[i]
             theta[:,idx] = self.wave(A, f, phi, self.T * 1e-3)
         # Run the actual test
         self.run_ssasc(theta, N, 3)
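The `wave` helper used above is not shown in this snippet. A minimal sketch of what it plausibly computes, inferred only from the call `self.wave(A, f, phi, self.T * 1e-3)` and the 1 ms bins implied by the `1e-3` factors (name, signature and sampling step are assumptions, not the repository's actual code):

import numpy

def wave(A, f, phi, duration, dt=1e-3):
    # Hypothetical sinusoid: amplitude A, frequency f in Hz, phase phi,
    # sampled every dt seconds over `duration` seconds (T samples for
    # duration = T * 1e-3)
    t = numpy.arange(0, duration, dt)
    return A * numpy.sin(2 * numpy.pi * f * t + phi)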
Example #2
def generate_stationary_thetas(N, O, T):
    """ Generates stationary thetas.

    :param int N:
        Number of cells
    :param int O:
        Order of model
    :param int T:
        Number of time bins

    :return:
        Array of shape (T, D) with non-changing thetas; 'D' is the dimensionality of the model.
    """
    th1, th2 = -3., 0.
    D = transforms.compute_D(N, O)
    th = numpy.zeros([T, D])
    th[:, :N] = th1
    th[:, N:] = th2 / N * (1 + 0.5 * numpy.random.randn(T, D - N))
    # Centre the pairwise couplings so each row of the (symmetric) coupling
    # matrix has zero mean across cells (assumes pairwise terms, i.e. O = 2)
    idx = numpy.triu_indices(N, 1)
    theta_array = numpy.zeros([N, N])
    theta_array[idx[0], idx[1]] = th[0, N:]
    theta_array[idx[1], idx[0]] = th[0, N:]
    mean_thetas = numpy.mean(theta_array, axis=0)
    theta_array -= numpy.tile(mean_thetas, [N, 1])
    th[:, N:] = theta_array[idx[0], idx[1]]
    return th
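A minimal usage sketch, assuming the repository's `transforms` module is importable:

# 8 cells, pairwise model: D = 8 + 8*7/2 = 36
th = generate_stationary_thetas(N=8, O=2, T=500)
print(th.shape)  # (500, 36)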
Example #3
 def test_to_constant(self):
     print "Test Third-Order Constant Interactions."
     # Repeat test for different numbers of neurons
     for N in 2**numpy.arange(2, 4):
         print(N)
         # Compute dimensionality of natural-parameter distribution
         D = transforms.compute_D(N, 3)
         # Create a regular set of theta parameters for each timestep
         theta = numpy.zeros((self.T, D))
         theta[:,:N] = self.theta_base
         theta[:,N:] = -1.
         # Run the actual test
         self.run_ssasc(theta, N, 3)
Example #4
def generate_thetas(N,
                    O,
                    T,
                    mu1=-2.,
                    sigma1=50,
                    mu2=0.,
                    sigma2=50,
                    alpha=12.,
                    ratio_modulated=1.):
    """ Generates dynamic thetas by a Gaussian Process.

    :param int N:
        Number of cells
    :param int O:
        Order of model
    :param int T:
        Number of time bins
    :param float mu1:
        Mean of the first-order parameters for the Gaussian process (Default=-2)
    :param float sigma1:
        Determines how strongly the first-order parameters change over time (Default=50)
    :param float mu2:
        Mean of the couplings for the Gaussian process (Default=0)
    :param float sigma2:
        Determines how strongly the couplings change over time (Default=50)
    :param float alpha:
        Scalar that scales the variance of the Gaussian process (Default=12)
    :param float ratio_modulated:
        Scalar between 0 and 1 giving the fraction of interactions that are modulated (Default=1)

    :return:
        Matrix of shape (T, D) with theta parameters generated by the GP;
        'D' is the dimensionality of the model.
    """
    D = transforms.compute_D(N, O)
    # Create covariance matrix
    X = numpy.tile(numpy.arange(T), (T, 1))
    K1 = 1. / alpha * numpy.exp(-(X - X.transpose())**2 / (2. * sigma1**2))
    K2 = 1. / alpha * numpy.exp(-(X - X.transpose())**2 / (2. * sigma2**2))
    # Generate Gaussian processes
    L1 = numpy.linalg.cholesky(K1 + 1e-13 * numpy.eye(T))
    L2 = numpy.linalg.cholesky(K2 + 1e-13 * numpy.eye(T))
    theta = numpy.empty([T, D])
    theta[:, :N] = mu1 + numpy.dot(L1, numpy.random.randn(T, N))
    theta[:, N:] = mu2 + numpy.dot(L2, numpy.random.randn(T, D - N))
    num_non_modulated = int(numpy.around((1. - ratio_modulated) * (D - N)))
    non_modulated_idx = random.sample(range(N, D), num_non_modulated)
    theta[:, non_modulated_idx] = 0.
    return theta
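A minimal usage sketch; with ratio_modulated=0.5 roughly half of the couplings are pinned to zero:

theta = generate_thetas(N=10, O=2, T=500, ratio_modulated=0.5)
print(theta.shape)  # (500, 55): 10 first-order terms plus 45 couplings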
Example #5
 def __init__(self, spikes, window, D, map_function, lmbda, J,
              theta_o, sigma_o, exact):
     # Order of interactions considered in the graphs
     order_ising = 2
     # Exact boolean
     self.exact = exact
     # Record the input parameters
     self.spikes, self.window = spikes, window
     self.max_posterior = map_function
     # Count the number of original static Ising models
     self.D = D
     # Compute the `sample' spike-train interactions from the input spikes
     self.F_tilde = transforms.compute_y(self.spikes, order_ising, self.window)
     # Count timesteps, trials, cells
     T, self.R, self.N = self.spikes.shape
     self.T = self.F_tilde.shape[0]
     assert self.T == T // window
     # Dimension of the graphs
     self.dim = transforms.compute_D(self.N, order_ising)
     # Initialisation of the graphs
     if J is not None:
         self.J = J
     else:
         self.J = numpy.random.normal(0, 1, (self.dim, D))
     # Compute the feature vector for all timesteps
     self.f = transforms.compute_f_LD(self.F_tilde, self.J)
     # Initialise one-step-prediction, filtered, and smoothed density means
     self.theta_o = numpy.ones((self.T, self.D)) * theta_o
     self.theta_f = numpy.zeros((self.T, self.D))
     self.theta_s = numpy.zeros((self.T, self.D))
     # Initialise covariances of the same (an I-matrix for each timestep)
     I = [numpy.identity(self.D) for i in range(self.T)]
     I = numpy.vstack(I).reshape((self.T, self.D, self.D))
     self.sigma_o = sigma_o * I
     self.sigma_f = .1 * I
     self.sigma_s = .1 * I
     del I
     # Initialise autoregressive and transition probability hyperparameters
     self.F = numpy.identity(self.D)
     self.Q = 1. / lmbda * numpy.identity(self.D)
     # Metadata about EM algorithm execution
     self.iterations, self.convergence = 0, numpy.inf
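For reference, the array shapes this constructor sets up, read off from the assignments above:

# spikes           : (T_raw, R, N) input spike trains; T = T_raw // window
# self.F_tilde     : (T, dim)      sample interactions, dim = compute_D(N, 2)
# self.J           : (dim, D)      graph dictionary (random normal if J is None)
# self.f           : (T, D)        feature vectors for all timesteps
# self.theta_o/f/s : (T, D)        one-step-prediction / filtered / smoothed means
# self.sigma_o/f/s : (T, D, D)     the corresponding covariances
# self.F, self.Q   : (D, D)        autoregressive and transition-noise matrices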
Example #6
def generate_data_ctime(data_path='../Data/', max_network_size=60,
                        num_procs=4):
    N, O, R, T = 10, 2, 200, 500
    num_of_networks = max_network_size // N
    mu = numpy.zeros(T)
    x = numpy.arange(1, 401)
    mu[100:] = 1. * (3. / (2. * numpy.pi * (x / 400. * 3.) ** 3)) ** .5 * \
               numpy.exp(-3. * ((x / 400. * 3.) - 1.) ** 2 /
                         (2. * (x / 400. * 3.)))

    D = transforms.compute_D(N, O)
    thetas = numpy.empty([num_of_networks, T, D])
    transforms.initialise(N, O)
    for i in range(num_of_networks):
        thetas[i] = synthesis.generate_thetas(N, O, T, mu1=-2.)
        thetas[i, :, :N] += mu[:, numpy.newaxis]

    R = 500
    f = h5py.File(data_path + 'comp_time_data.h5', 'w')
    f.create_dataset('N', data=numpy.arange(N, max_network_size+N, N))
    f.create_dataset('ctime', shape=[2,num_of_networks])
    f.close()
    for i in range(num_of_networks):
        print('N=%d' % ((i + 1) * N))
        D = transforms.compute_D((i + 1) * N, O)
        theta_all = numpy.empty([T, D])
        triu_idx = numpy.triu_indices(N, k=1)
        triu_idx_all = numpy.triu_indices((i + 1) * N, k=1)

        for j in range(i + 1):
            theta_all[:, N * j:(j + 1) * N] = thetas[j, :, :N]

        for t in range(T):
            theta_ij = numpy.zeros([(i + 1) * N, (i + 1) * N])
            for j in range(i + 1):
                theta_ij[triu_idx[0] + j * N, triu_idx[1] + j * N] = \
                    thetas[j, t, N:]

            theta_all[t, (i + 1) * N:] = theta_ij[triu_idx_all]

        spikes = synthesis.generate_spikes_gibbs_parallel(theta_all,
                                                          (i + 1) * N, O, R,
                                                          sample_steps=10,
                                                          num_proc=num_procs)
        t1 = time.time()
        result = __init__.run(spikes, O, map_function='cg',
                              param_est='pseudo',
                              param_est_eta='bethe_hybrid',
                              lmbda1=100, lmbda2=200)
        t2 = time.time()
        ctime_bethe = t2 - t1

        f = h5py.File(data_path + 'comp_time_data.h5', 'r+')
        f['ctime'][0, i] = ctime_bethe
        f.close()

        try:
            t1 = time.time()
            result = __init__.run(spikes, O, map_function='cg',
                                  param_est='pseudo',
                                  param_est_eta='mf',
                                  lmbda1=100, lmbda2=200)
            t2 = time.time()
            ctime_TAP = t2 - t1
        except Exception:
            ctime_TAP = numpy.nan

        f = h5py.File(data_path + 'comp_time_data.h5', 'r+')
        f['ctime'][1, i] = ctime_TAP
        f.close()
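A short sketch of how the timing results could be read back afterwards (dataset names follow the create_dataset calls above):

import h5py

with h5py.File('../Data/comp_time_data.h5', 'r') as f:
    sizes = f['N'][()]      # N, 2N, ..., max_network_size
    ctime = f['ctime'][()]  # row 0: bethe_hybrid, row 1: mf (TAP)
for n, t_bethe, t_tap in zip(sizes, ctime[0], ctime[1]):
    print('N=%d  bethe=%.1fs  TAP=%.1fs' % (n, t_bethe, t_tap))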
Example #7
# Number of graphs in the fitted model
D_fit = 2

# Precision
lmbda = 1000

# Random seed
numpy.random.seed(0)

# p_map and e_map (Initialise)
if exact:
    transforms.initialise(N, 2)

# Number of dimensions of the graphs
dim = transforms.compute_D(N, 2)

# Generative J matrix
J_gen = numpy.zeros((dim, D_fit))
x1 = numpy.array([-1, 1, -1, 1, 1, 1, -1, 1, -1])
x2 = numpy.array([1, 1, 1, -1, 1, -1, -1, 1, -1])
a1 = numpy.outer(x1, x1)
a2 = numpy.outer(x2, x2)
J_gen[:N, 0] = numpy.diag(a1)
J_gen[:N, 1] = numpy.diag(a2)
J_gen[N:, 0] = a1[numpy.triu_indices(N, k=1)]
J_gen[N:, 1] = a2[numpy.triu_indices(N, k=1)]
J_gen = J_gen / (numpy.var(J_gen, axis=0)**0.5)

# Number of time bins in one epoch
T1 = 1500
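A quick sanity check of the J_gen construction, assuming N = 9 as implied by the nine-element patterns (pure numpy, with compute_D(9, 2) expanded to 9 + 9*8/2 = 45):

import numpy

N = 9
dim = N + N * (N - 1) // 2  # 45
x1 = numpy.array([-1, 1, -1, 1, 1, 1, -1, 1, -1])
a1 = numpy.outer(x1, x1)
col = numpy.concatenate([numpy.diag(a1), a1[numpy.triu_indices(N, k=1)]])
col = col / col.std()       # mirrors the final rescaling of J_gen
assert col.shape == (dim,) and abs(col.std() - 1.) < 1e-12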
Example #8
def generate_data_figure1(data_path='../Data/'):
    N, O, R, T = 15, 2, 200, 500
    mu = numpy.zeros(T)
    x = numpy.arange(1, 401)
    mu[100:] = 1. * (3. / (2. * numpy.pi * (x/400.*3.) ** 3)) ** .5 * \
               numpy.exp(-3. * ((x/400.*3.) - 1.) ** 2 / (2. * (x/400.*3.)))
    theta1 = synthesis.generate_thetas(N, O, T, mu1=-2.)
    theta2 = synthesis.generate_thetas(N, O, T, mu1=-2.)
    theta1[:, :N] += mu[:, numpy.newaxis]
    theta2[:, :N] += mu[:, numpy.newaxis]
    D = transforms.compute_D(N * 2, O)
    theta_all = numpy.empty([T, D])
    theta_all[:, :N] = theta1[:, :N]
    theta_all[:, N:2 * N] = theta2[:, :N]
    triu_idx = numpy.triu_indices(N, k=1)
    triu_idx_all = numpy.triu_indices(2 * N, k=1)
    for t in range(T):
        theta_ij = numpy.zeros([2 * N, 2 * N])
        theta_ij[triu_idx] = theta1[t, N:]
        theta_ij[triu_idx[0] + N, triu_idx[1] + N] = theta2[t, N:]
        theta_all[t, 2 * N:] = theta_ij[triu_idx_all]

    psi1 = numpy.empty([T, 3])
    psi2 = numpy.empty([T, 3])
    eta1 = numpy.empty(theta1.shape)
    eta2 = numpy.empty(theta2.shape)
    alpha = [.999, 1., 1.001]
    transforms.initialise(N, O)
    for i in range(T):
        for j, a in enumerate(alpha):
            psi1[i, j] = transforms.compute_psi(a * theta1[i])
        p = transforms.compute_p(theta1[i])
        eta1[i] = transforms.compute_eta(p)
        for j, a in enumerate(alpha):
            psi2[i, j] = transforms.compute_psi(a * theta2[i])
        p = transforms.compute_p(theta2[i])
        eta2[i] = transforms.compute_eta(p)

    psi_all = psi1 + psi2
    S1 = -numpy.sum(eta1 * theta1, axis=1) + psi1[:, 1]
    S1 /= numpy.log(2)
    S2 = -numpy.sum(eta2 * theta2, axis=1) + psi2[:, 1]
    S2 /= numpy.log(2)
    S_all = S1 + S2

    C1 = (psi1[:, 0] - 2. * psi1[:, 1] + psi1[:, 2]) / .001 ** 2
    C1 /= numpy.log(2)
    C2 = (psi2[:, 0] - 2. * psi2[:, 1] + psi2[:, 2]) / .001 ** 2
    C2 /= numpy.log(2)

    C_all = C1 + C2

    spikes = synthesis.generate_spikes_gibbs_parallel(theta_all, 2 * N, O, R,
                                                      sample_steps=10,
                                                      num_proc=4)

    print('Model and Data generated')

    emd = __init__.run(spikes, O, map_function='cg', param_est='pseudo',
                       param_est_eta='bethe_hybrid', lmbda1=100, lmbda2=200)

    f = h5py.File(data_path + 'figure1data.h5', 'w')
    g_data = f.create_group('data')
    g_data.create_dataset('theta_all', data=theta_all)
    g_data.create_dataset('psi_all', data=psi_all)
    g_data.create_dataset('S_all', data=S_all)
    g_data.create_dataset('C_all', data=C_all)
    g_data.create_dataset('spikes', data=spikes)
    g_data.create_dataset('theta1', data=theta1)
    g_data.create_dataset('theta2', data=theta2)
    g_data.create_dataset('psi1', data=psi1)
    g_data.create_dataset('S1', data=S1)
    g_data.create_dataset('C1', data=C1)
    g_data.create_dataset('psi2', data=psi2)
    g_data.create_dataset('S2', data=S2)
    g_data.create_dataset('C2', data=C2)
    g_fit = f.create_group('fit')
    g_fit.create_dataset('theta_s', data=emd.theta_s)
    g_fit.create_dataset('sigma_s', data=emd.sigma_s)
    g_fit.create_dataset('Q', data=emd.Q)
    f.close()

    print('Fit and saved')

    f = h5py.File(data_path + 'figure1data.h5', 'r+')
    g_fit = f['fit']
    theta = g_fit['theta_s'][()]
    sigma = g_fit['sigma_s'][()]

    X = numpy.random.randn(theta.shape[0], theta.shape[1], 100)
    theta_sampled = \
        theta[:, :, numpy.newaxis] + X * numpy.sqrt(sigma)[:, :, numpy.newaxis]

    T = range(theta.shape[0])
    eta_sampled = numpy.empty([theta.shape[0], theta.shape[1], 100])
    psi_sampled = numpy.empty([theta.shape[0], 100, 3])

    func = partial(get_sampled_eta_psi, theta_sampled=theta_sampled, N=2*N)
    pool = multiprocessing.Pool(10)
    results = pool.map(func, T)
    pool.close()
    pool.join()

    for eta, psi, i in results:
        eta_sampled[i] = eta
        psi_sampled[i] = psi
    S_sampled = \
        -(numpy.sum(eta_sampled*theta_sampled, axis=1) - psi_sampled[:, :, 1])
    S_sampled /= numpy.log(2)
    C_sampled = \
        (psi_sampled[:, :, 0] - 2.*psi_sampled[:, :, 1] +
         psi_sampled[:, :, 2])/.001**2
    C_sampled /= numpy.log(2)
    g_sampled = f.create_group('sampled_results')
    g_sampled.create_dataset('theta_sampled', data=theta_sampled)
    g_sampled.create_dataset('eta_sampled', data=eta_sampled)
    g_sampled.create_dataset('psi_sampled', data=psi_sampled)
    g_sampled.create_dataset('S_sampled', data=S_sampled)
    g_sampled.create_dataset('C_sampled', data=C_sampled)
    f.close()

    print('Done')
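get_sampled_eta_psi is referenced above but not defined on this page. A plausible reconstruction, inferred only from how it is called and unpacked (it returns (eta, psi, t) with psi evaluated at the three alpha values used for the finite-difference heat capacity; the body is an assumption, not the repository's actual helper):

def get_sampled_eta_psi(t, theta_sampled, N):
    # eta: (D, n_samples) and psi: (n_samples, 3) for the samples at time t
    n_samples = theta_sampled.shape[2]
    eta = numpy.empty([theta_sampled.shape[1], n_samples])
    psi = numpy.empty([n_samples, 3])
    for s in range(n_samples):
        for j, a in enumerate([.999, 1., 1.001]):
            if a == 1.:
                eta[:, s], psi[s, j] = bethe_approximation.compute_eta_hybrid(
                    a * theta_sampled[t, :, s], N, return_psi=1)
            else:
                psi[s, j] = bethe_approximation.compute_eta_hybrid(
                    a * theta_sampled[t, :, s], N, return_psi=1)[1]
    return eta, psi, t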
Example #9
def generate_data_figure2(data_path='../Data/', max_network_size=60):
    N, O, R, T = 10, 2, 200, 500
    num_of_networks = max_network_size // N
    mu = numpy.zeros(T)
    x = numpy.arange(1, 401)
    mu[100:] = 1. * (3. / (2. * numpy.pi * (x / 400. * 3.) ** 3)) ** .5 * \
               numpy.exp(-3. * ((x / 400. * 3.) - 1.) ** 2 /
                         (2. * (x / 400. * 3.)))

    D = transforms.compute_D(N, O)
    thetas = numpy.empty([num_of_networks, T, D])
    etas = numpy.empty([num_of_networks, T, D])
    psi = numpy.empty([num_of_networks, T])
    S = numpy.empty([num_of_networks, T])
    C = numpy.empty([num_of_networks, T])
    transforms.initialise(N, O)
    for i in range(num_of_networks):
        thetas[i] = synthesis.generate_thetas(N, O, T, mu1=-2.)
        thetas[i, :, :N] += mu[:, numpy.newaxis]
        for t in range(T):
            p = transforms.compute_p(thetas[i, t])
            etas[i, t] = transforms.compute_eta(p)
            psi[i, t] = transforms.compute_psi(thetas[i, t])
            psi1 = transforms.compute_psi(.999 * thetas[i, t])
            psi2 = transforms.compute_psi(1.001 * thetas[i, t])
            C[i, t] = (psi1 - 2. * psi[i, t] + psi2) / .001 ** 2
            S[i, t] = -(numpy.sum(etas[i, t] * thetas[i, t]) - psi[i, t])
    C /= numpy.log(2)
    S /= numpy.log(2)
    f = h5py.File(data_path + 'figure2data.h5', 'w')
    g1 = f.create_group('data')
    g1.create_dataset('thetas', data=thetas)
    g1.create_dataset('etas', data=etas)
    g1.create_dataset('psi', data=psi)
    g1.create_dataset('S', data=S)
    g1.create_dataset('C', data=C)
    g2 = f.create_group('error')
    g2.create_dataset('MISE_thetas', shape=[num_of_networks])
    g2.create_dataset('MISE_population_rate', shape=[num_of_networks])
    g2.create_dataset('MISE_psi', shape=[num_of_networks])
    g2.create_dataset('MISE_S', shape=[num_of_networks])
    g2.create_dataset('MISE_C', shape=[num_of_networks])
    g2.create_dataset('population_rate', shape=[num_of_networks, T])
    g2.create_dataset('psi', shape=[num_of_networks, T])
    g2.create_dataset('S', shape=[num_of_networks, T])
    g2.create_dataset('C', shape=[num_of_networks, T])
    f.close()
    for i in range(num_of_networks):
        print('N=%d' % ((i + 1) * N))
        D = transforms.compute_D((i + 1) * N, O)
        theta_all = numpy.empty([T, D])
        triu_idx = numpy.triu_indices(N, k=1)
        triu_idx_all = numpy.triu_indices((i + 1) * N, k=1)
        for j in range(i + 1):
            theta_all[:, N * j:(j + 1) * N] = thetas[j, :, :N]

        for t in range(T):
            theta_ij = numpy.zeros([(i + 1) * N, (i + 1) * N])
            for j in range(i + 1):
                theta_ij[triu_idx[0] + j * N, triu_idx[1] + j * N] = \
                    thetas[j, t, N:]

            theta_all[t, (i + 1) * N:] = theta_ij[triu_idx_all]

        spikes = synthesis.generate_spikes_gibbs_parallel(theta_all,
                                                          (i + 1) * N, O, R,
                                                          sample_steps=10,
                                                          num_proc=4)
        emd = __init__.run(spikes, O, map_function='cg', param_est='pseudo',
                           param_est_eta='bethe_hybrid', lmbda1=100,
                           lmbda2=200)

        eta_est = numpy.empty(emd.theta_s.shape)
        psi_est = numpy.empty(T)
        S_est = numpy.empty(T)
        C_est = numpy.empty(T)
        for t in range(T):
            eta_est[t], psi_est[t] = bethe_approximation.compute_eta_hybrid(
                emd.theta_s[t], (i + 1) * N, return_psi=1)
            psi1 = bethe_approximation.compute_eta_hybrid(
                .999 * emd.theta_s[t], (i + 1) * N, return_psi=1)[1]
            psi2 = bethe_approximation.compute_eta_hybrid(
                1.001 * emd.theta_s[t], (i + 1) * N, return_psi=1)[1]
            S_est[t] = -(numpy.sum(eta_est[t] * emd.theta_s[t]) - psi_est[t])
            C_est[t] = (psi1 - 2. * psi_est[t] + psi2) / .001 ** 2
        S_est /= numpy.log(2)
        C_est /= numpy.log(2)
        population_rate = numpy.mean(numpy.mean(etas[:i + 1, :, :N], axis=0),
                                     axis=1)
        population_rate_est = numpy.mean(eta_est[:, :(i + 1) * N], axis=1)
        psi_true = numpy.sum(psi[:(i + 1), :], axis=0)
        S_true = numpy.sum(S[:(i + 1), :], axis=0)
        C_true = numpy.sum(C[:(i + 1), :], axis=0)

        f = h5py.File(data_path + 'figure2data.h5', 'r+')
        f['error']['MISE_thetas'][i] = numpy.mean(
            (theta_all - emd.theta_s) ** 2)
        f['error']['MISE_population_rate'][i] = numpy.mean(
            (population_rate - population_rate_est) ** 2)
        f['error']['MISE_psi'][i] = numpy.mean((psi_est - psi_true) ** 2)
        f['error']['MISE_S'][i] = numpy.mean((S_est - S_true) ** 2)
        f['error']['MISE_C'][i] = numpy.mean((C_est - C_true) ** 2)
        f['error']['population_rate'][i] = population_rate_est
        f['error']['psi'][i] = psi_est
        f['error']['S'][i] = S_est
        f['error']['C'][i] = C_est
        f.close()

    f = h5py.File(data_path + 'figure2data.h5', 'r+')
    thetas = f['data']['thetas'][()]
    etas = f['data']['etas'][()]
    psi = f['data']['psi'][()]
    S = f['data']['S'][()]
    C = f['data']['C'][()]

    g2 = f.create_group('error500')
    g2.create_dataset('population_rate', shape=[num_of_networks, T])
    g2.create_dataset('psi', shape=[num_of_networks, T])
    g2.create_dataset('S', shape=[num_of_networks, T])
    g2.create_dataset('C', shape=[num_of_networks, T])
    g2.create_dataset('MISE_thetas', shape=[num_of_networks])
    g2.create_dataset('MISE_population_rate', shape=[num_of_networks])
    g2.create_dataset('MISE_psi', shape=[num_of_networks])
    g2.create_dataset('MISE_S', shape=[num_of_networks])
    g2.create_dataset('MISE_C', shape=[num_of_networks])
    f.close()

    R = 500

    for i in range(num_of_networks):
        print('N=%d' % ((i + 1) * N))
        D = transforms.compute_D((i + 1) * N, O)
        theta_all = numpy.empty([T, D])
        triu_idx = numpy.triu_indices(N, k=1)
        triu_idx_all = numpy.triu_indices((i + 1) * N, k=1)

        for j in range(i + 1):
            theta_all[:, N * j:(j + 1) * N] = thetas[j, :, :N]

        for t in range(T):
            theta_ij = numpy.zeros([(i + 1) * N, (i + 1) * N])
            for j in range(i + 1):
                theta_ij[triu_idx[0] + j * N, triu_idx[1] + j * N] = \
                    thetas[j, t, N:]

            theta_all[t, (i + 1) * N:] = theta_ij[triu_idx_all]

        spikes = synthesis.generate_spikes_gibbs_parallel(theta_all,
                                                          (i + 1) * N, O, R,
                                                          sample_steps=10,
                                                          num_proc=4)
        emd = __init__.run(spikes, O, map_function='cg', param_est='pseudo',
                           param_est_eta='bethe_hybrid', lmbda1=100,
                           lmbda2=200)

        eta_est = numpy.empty(emd.theta_s.shape)
        psi_est = numpy.empty(T)
        S_est = numpy.empty(T)
        C_est = numpy.empty(T)

        for t in range(T):
            eta_est[t], psi_est[t] = \
                bethe_approximation.compute_eta_hybrid(emd.theta_s[t],
                                                       (i + 1) * N,
                                                       return_psi=1)
            psi1 = bethe_approximation.compute_eta_hybrid(.999 * emd.theta_s[t],
                                                          (i + 1) * N,
                                                          return_psi=1)[1]
            psi2 = bethe_approximation.compute_eta_hybrid(
                1.001 * emd.theta_s[t], (i + 1) * N, return_psi=1)[1]
            S_est[t] = -(numpy.sum(eta_est[t] * emd.theta_s[t]) - psi_est[t])
            C_est[t] = (psi1 - 2. * psi_est[t] + psi2) / .001 ** 2
        S_est /= numpy.log(2)
        C_est /= numpy.log(2)
        population_rate = numpy.mean(numpy.mean(etas[:i + 1, :, :N], axis=0),
                                     axis=1)
        population_rate_est = numpy.mean(eta_est[:, :(i + 1) * N], axis=1)
        psi_true = numpy.sum(psi[:(i + 1), :], axis=0)
        S_true = numpy.sum(S[:(i + 1), :], axis=0)
        C_true = numpy.sum(C[:(i + 1), :], axis=0)

        f = h5py.File(data_path + 'figure2data.h5', 'r+')
        f['error500']['MISE_thetas'][i] = numpy.mean(
            (theta_all - emd.theta_s) ** 2)
        f['error500']['MISE_population_rate'][i] = numpy.mean(
            (population_rate - population_rate_est) ** 2)
        f['error500']['MISE_psi'][i] = numpy.mean((psi_est - psi_true) ** 2)
        f['error500']['MISE_S'][i] = numpy.mean((S_est - S_true) ** 2)
        f['error500']['MISE_C'][i] = numpy.mean((C_est - C_true) ** 2)
        f['error500']['population_rate'][i] = population_rate_est
        f['error500']['psi'][i] = psi_est
        f['error500']['S'][i] = S_est
        f['error500']['C'][i] = C_est
        f.close()
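To compare the two trial counts afterwards, the MISE datasets can be read back like this (a sketch; group and dataset names follow the create_dataset calls above, with N = 10 per sub-network as in the function):

import h5py

with h5py.File('../Data/figure2data.h5', 'r') as f:
    mise_200 = f['error']['MISE_thetas'][()]     # fits with R = 200 trials
    mise_500 = f['error500']['MISE_thetas'][()]  # fits with R = 500 trials
for i, (e200, e500) in enumerate(zip(mise_200, mise_500)):
    print('N=%d  MISE(R=200)=%.2e  MISE(R=500)=%.2e'
          % ((i + 1) * 10, e200, e500))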