Example #1
import timeit

import ssm


def time_laplace_em_end2end(ncalls=5):
    print("Benchmarking 1 iter of laplace-em fitting on Vanilla LDS.")
    params_list = [SINGLE_DATA_PARAMS, MULT_DATA_PARAMS]
    for (N, D, T, num_datas) in params_list:
        lds_true = ssm.LDS(N, D, dynamics="gaussian", emissions="gaussian")
        datas = [lds_true.sample(T)[1] for _ in range(num_datas)]

        lds_new = ssm.LDS(N, D, dynamics="gaussian", emissions="gaussian")
        print("N, D, T, num_datas: = ", N, D, T, num_datas)
        total = timeit.timeit(
            lambda: lds_new.fit(datas, initialize=False, num_iters=1),
            number=ncalls)
        print("Avg time per call: %f" % (total / ncalls))
Example #2
import timeit

import ssm


def time_lds_sample(ncalls=20):
    print("Testing continuous sample performance:")
    params_list = [SINGLE_DATA_PARAMS, MULT_DATA_PARAMS]
    for (N, D, T, num_datas) in params_list:
        lds_true = ssm.LDS(N, D, dynamics="gaussian", emissions="gaussian")
        datas = [lds_true.sample(T)[1] for _ in range(num_datas)]

        # Calling fit will return a variational posterior object.
        # This is simpler than creating one ourselves.
        _, posterior = lds_true.fit(datas,
                                    initialize=False,
                                    num_iters=1,
                                    method="laplace_em")

        # Now we test the speed of sampling from this object.
        print("N, D, T, num_datas: = ", N, D, T, num_datas)
        total = timeit.timeit(lambda: posterior.sample_continuous_states(),
                              number=ncalls)
        print("Avg time per call: %f" % (total / ncalls))
Example #3
import numpy as np
import numpy.random as npr

import ssm
from ssm.util import random_rotation

# Specify whether or not to save figures
save_figures = False

# In[2]:

# Set the parameters of the LDS
T = 200  # number of time bins
K = 5  # number of discrete states
D = 2  # number of latent dimensions
N = 10  # number of observed dimensions

# In[3]:

# Make an LDS with somewhat interesting dynamics parameters
true_lds = ssm.LDS(N, D, emissions="gaussian")
A0 = .99 * random_rotation(D, theta=np.pi / 20)
# S = (1 + 3 * npr.rand(D))
S = np.arange(1, D + 1)
R = np.linalg.svd(npr.randn(D, D))[0] * S
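# Similarity transform: A = R A0 R^{-1} has the same eigenvalues as A0
# (magnitude 0.99), so the dynamics are a stable, slowly decaying rotation in a skewed basis.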
A = R.dot(A0).dot(np.linalg.inv(R))
b = npr.randn(D)
true_lds.dynamics.As[0] = A
true_lds.dynamics.bs[0] = b
_, x, y = true_lds.sample(T)

# In[4]:

# Plot the dynamics vector field
xmins = x.min(axis=0)
xmaxs = x.max(axis=0)
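The cell is truncated here. Below is a minimal sketch, assuming a standard quiver plot is wanted (not necessarily the original figure code), of drawing the flow field implied by A and b over the range of the sampled latents.

import matplotlib.pyplot as plt

# One-step displacement field dx = A x + b - x, evaluated on a grid spanning the latents.
npts = 20
xx, yy = np.meshgrid(np.linspace(xmins[0], xmaxs[0], npts),
                     np.linspace(xmins[1], xmaxs[1], npts))
grid = np.column_stack([xx.ravel(), yy.ravel()])
dx = grid.dot(A.T) + b - grid

plt.figure(figsize=(6, 6))
plt.quiver(xx, yy, dx[:, 0].reshape(xx.shape), dx[:, 1].reshape(yy.shape), color="gray")
plt.plot(x[:, 0], x[:, 1], '-k', lw=1)
plt.xlabel("$x_1$")
plt.ylabel("$x_2$")
plt.title("Dynamics vector field and a sampled latent trajectory")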
Example #4
import numpy as np
import matplotlib.pyplot as plt

import ssm
from ssm.util import random_rotation, find_permutation

# In[2]:

# Set the parameters of the LDS
T = 50  # number of time bins per batch
B = 20  # number of batches
D = 2  # number of latent dimensions
N = 10  # number of observed dimensions

# In[3]:

# Make an LDS with the true parameters
true_lds = ssm.LDS(N,
                   D,
                   emissions="poisson_nn",
                   emission_kwargs=dict(link="softplus",
                                        hidden_layer_sizes=(50, 50)))
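# "poisson_nn" emissions: spike counts are Poisson with rates given by a softplus-linked
# neural network (two hidden layers of 50 units) applied to the latent state.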
true_lds.dynamics.As[0] = .95 * random_rotation(D, theta=np.pi / 20)

# Sample a bunch of short trajectories
# (the dynamics are stable, so every trajectory decays toward the same fixed point;
#  most of the information is in the initial transients)
zs, xs, ys = list(zip(*[true_lds.sample(T) for _ in range(B)]))

# In[4]:

for x in xs:
    plt.plot(x[:, 0], x[:, 1])
plt.xlabel("$x_1$")
plt.ylabel("$x_2$")
plt.title("Simulated latent trajectories")