Example #1
def make_nascar_model():
    As = [
        random_rotation(D_latent, np.pi / 24.),
        random_rotation(D_latent, np.pi / 48.)
    ]

    # Set the center points for each system
    centers = [np.array([+2.0, 0.]), np.array([-2.0, 0.])]
    bs = [
        -(A - np.eye(D_latent)).dot(center) for A, center in zip(As, centers)
    ]

    # Add a "right" state
    As.append(np.eye(D_latent))
    bs.append(np.array([+0.1, 0.]))

    # Add a "right" state
    As.append(np.eye(D_latent))
    bs.append(np.array([-0.25, 0.]))

    # Construct multinomial regression to divvy up the space
    w1, b1 = np.array([+1.0, 0.0]), np.array([-2.0])  # x + b > 0 -> x > -b
    w2, b2 = np.array([-1.0, 0.0]), np.array([-2.0])  # -x + b > 0 -> x < b
    w3, b3 = np.array([0.0, +1.0]), np.array([0.0])  # y > 0
    w4, b4 = np.array([0.0, -1.0]), np.array([0.0])  # y < 0
    Rs = np.row_stack((100 * w1, 100 * w2, 10 * w3, 10 * w4))
    r = np.concatenate((100 * b1, 100 * b2, 10 * b3, 10 * b4))

    true_rslds = SLDS(D_obs,
                      K,
                      D_latent,
                      transitions="recurrent_only",
                      dynamics="diagonal_gaussian",
                      emissions="gaussian_orthog",
                      single_subspace=True)
    true_rslds.dynamics.mu_init = np.tile(np.array([[0, 1]]), (K, 1))
    true_rslds.dynamics.sigmasq_init = 1e-4 * np.ones((K, D_latent))
    true_rslds.dynamics.As = np.array(As)
    true_rslds.dynamics.bs = np.array(bs)
    true_rslds.dynamics.sigmasq = 1e-4 * np.ones((K, D_latent))

    true_rslds.transitions.Rs = Rs
    true_rslds.transitions.r = r

    true_rslds.emissions.inv_etas = np.log(1e-2) * np.ones((1, D_obs))
    return true_rslds
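A minimal usage sketch (not part of the original snippet): make_nascar_model() reads D_obs, K, and D_latent from module scope, so those constants must exist before it is called; the values below are illustrative assumptions.

D_obs, K, D_latent = 10, 4, 2          # 4 discrete states: two rotations plus the "right" and "left" states
true_rslds = make_nascar_model()
z_true, x_true, y_true = true_rslds.sample(1000)   # discrete states, continuous latents, observations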
Example #2
def get_random_dynamics(n_latent_dimensions,
                        n_discrete_states,
                        mystery_param_95=.95,
                        mystery_param_20=20):
    dynamics = np.zeros((n_discrete_states, n_latent_dimensions, n_latent_dimensions))
    for k in range(n_discrete_states):
        dynamics[k] = mystery_param_95 * random_rotation(
            n_latent_dimensions, theta=(k + 1) * np.pi / mystery_param_20)

    return dynamics
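A quick sanity-check call (an illustrative assumption, not from the original snippet): the returned array stacks one scaled rotation per discrete state, so its shape is (n_discrete_states, n_latent_dimensions, n_latent_dimensions).

example_dynamics = get_random_dynamics(n_latent_dimensions=2, n_discrete_states=5)
print(example_dynamics.shape)   # (5, 2, 2)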
Example #3
def __init__(self, K, D, M=0, lags=1):
    super(AutoRegressiveObservations, self).__init__(K, D, M)

    # Distribution over initial point
    self.mu_init = np.zeros(D)
    self.inv_sigma_init = np.zeros(D)

    # AR parameters
    assert lags > 0
    self.lags = lags
    self.As = .95 * np.array([
        np.column_stack([random_rotation(D), np.zeros((D, (lags - 1) * D))])
        for _ in range(K)])
    self.bs = npr.randn(K, D)
    self.Vs = npr.randn(K, D, M)
    self.inv_sigmas = -4 + npr.randn(K, D)
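A shape sketch for the constructor above (a hypothetical call, assuming the surrounding ssm class definition): with lags > 1 each per-state dynamics matrix is a D x D rotation padded with zeros for the older lags.

ar_obs = AutoRegressiveObservations(K=3, D=2, lags=2)
print(ar_obs.As.shape)   # (3, 2, 4): rotation on the most recent lag, zeros on the rest
print(ar_obs.bs.shape)   # (3, 2)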
Example #4
def test_implementation(user_function):
    d_latent = 2
    d_observation = 3
    A = .99 * util.random_rotation(
        d_latent)  # dynamics matrix, a slowly decaying rotation
    C = np.random.rand(d_observation, d_latent)  # observation matrix, random

    Q = np.diag(np.random.rand(d_latent, ))  # state noise covariance
    R = np.diag(np.random.rand(
        d_observation, ))  # observation noise covariance

    pi_0 = np.zeros((d_latent, ))  # initial state mean
    V_0 = Q  # initial state covariance

    num_timesteps = 200
    seed = 2
    X, Y = mystery_function(A, C, Q, R, pi_0, V_0, num_timesteps, seed)
    X_user, Y_user = user_function(A, C, Q, R, pi_0, V_0, num_timesteps, seed)

    if not X_user.shape == X.shape:
        if not Y_user.shape == Y.shape:
            print(
                'Try again! The shapes of both your X and Y look wrong -- they should be {} and {}'
                .format(X.shape, Y.shape))
            return
        else:
            print(
                'Try again! The shape of your X looks wrong -- it should be {}'
                .format(X.shape))
            return
    else:
        if not Y_user.shape == Y.shape:
            print(
                'Try again! The shape of your Y looks wrong -- it should be {}'
                .format(Y.shape))
            return

    if not np.array_equal(X, X_user):
        if not np.array_equal(Y, Y_user):
            print('Try again! Neither X nor Y matches our solution.')
        else:
            print('Try again! Y matches our solution, but X does not')
    else:
        if not np.array_equal(Y, Y_user):
            print('Try again! X matches our solution, but Y does not')
        else:
            print('Good job! Your implementation matches our solution')
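A hypothetical calling sketch (the reference mystery_function stays hidden, so this only shows the expected signature): user_function must return latent states X and observations Y simulated from the same LDS parameters and seed.

# def my_lds_sampler(A, C, Q, R, pi_0, V_0, num_timesteps, seed):
#     ...  # draw x_0 from N(pi_0, V_0), then x_t = A x_{t-1} + state noise, y_t = C x_t + observation noise
#     return X, Y
#
# test_implementation(my_lds_sampler)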
Example #5
# Specify whether or not to save figures
save_figures = False

# In[2]:

# Set the parameters of the simulation
T = 200  # number of time bins
K = 5  # number of discrete states
D = 2  # number of latent dimensions
N = 10  # number of observed dimensions

# In[3]:

# Make an LDS with somewhat interesting dynamics parameters
true_lds = ssm.LDS(N, D, emissions="gaussian")
A0 = .99 * random_rotation(D, theta=np.pi / 20)
# S = (1 + 3 * npr.rand(D))
S = np.arange(1, D + 1)
R = np.linalg.svd(npr.randn(D, D))[0] * S
A = R.dot(A0).dot(np.linalg.inv(R))
b = npr.randn(D)
true_lds.dynamics.As[0] = A
true_lds.dynamics.bs[0] = b
_, x, y = true_lds.sample(T)

# In[4]:

# Plot the dynamics vector field
xmins = x.min(axis=0)
xmaxs = x.max(axis=0)
npts = 20
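One way to finish the vector-field plot started above (a sketch, assuming matplotlib.pyplot is imported as plt in an earlier cell): evaluate the one-step displacement A x + b - x on a grid spanning the sampled latents and draw it with quiver.

xx, yy = np.meshgrid(np.linspace(xmins[0], xmaxs[0], npts),
                     np.linspace(xmins[1], xmaxs[1], npts))
xy_grid = np.column_stack((xx.ravel(), yy.ravel()))
dxy = xy_grid.dot(A.T) + b - xy_grid                      # displacement under the dynamics
plt.quiver(xx, yy, dxy[:, 0].reshape(xx.shape), dxy[:, 1].reshape(yy.shape))
plt.plot(x[:, 0], x[:, 1], '-k', lw=1)                    # overlay the sampled latent trajectory
plt.xlabel("$x_1$")
plt.ylabel("$x_2$")
plt.title("Dynamics vector field")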
Example #6
import numpy as np
import numpy.random as npr
import matplotlib.pyplot as plt

from ssm.models import SLDS, LDS
from ssm.variational import SLDSMeanFieldVariationalPosterior, SLDSTriDiagVariationalPosterior
from ssm.util import random_rotation, find_permutation

# Set the parameters of the SLDS
T = 1000    # number of time bins
K = 5       # number of discrete states
D = 2       # number of latent dimensions
N = 10      # number of observed dimensions

# Make an SLDS with the true parameters
true_slds = SLDS(N, K, D, emissions="gaussian")
for k in range(K):
    true_slds.dynamics.As[k] = .95 * random_rotation(D, theta=(k+1) * np.pi/20)

# Sample training and test data from the SLDS
z, x, y = true_slds.sample(T)
z_test, x_test, y_test = true_slds.sample(T)

# Mask off some data
mask = npr.rand(T, N) < 0.75
y_masked = y * mask

# Fit an SLDS with mean field posterior
print("Fitting SLDS with SVI using structured variational posterior")
slds = SLDS(N, K, D, emissions="gaussian")
slds.initialize(y_masked, masks=mask)

q_mf = SLDSMeanFieldVariationalPosterior(slds, y_masked, masks=mask)
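A plausible next step (an assumption about this older ssm API, not shown in the snippet): optimize the ELBO with SVI by passing the variational posterior to fit along with the masked data, then inspect the ELBO trace.

# q_mf_elbos = slds.fit(q_mf, y_masked, masks=mask, num_iters=1000, initialize=False)
# plt.plot(q_mf_elbos)
# plt.xlabel("SVI iteration")
# plt.ylabel("ELBO")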
Example #7
# Set the parameters of the LDS
T = 50  # number of time bins per batch
B = 20  # number of batches
D = 2  # number of latent dimensions
N = 10  # number of observed dimensions

# In[3]:

# Make an LDS with the true parameters
true_lds = ssm.LDS(N,
                   D,
                   emissions="poisson_nn",
                   emission_kwargs=dict(link="softplus",
                                        hidden_layer_sizes=(50, 50)))
true_lds.dynamics.As[0] = .95 * random_rotation(D, theta=np.pi / 20)

# Sample a bunch of short trajectories
# (they all converge so we only learn from the initial condition)
zs, xs, ys = list(zip(*[true_lds.sample(T) for _ in range(B)]))

# In[4]:

for x in xs:
    plt.plot(x[:, 0], x[:, 1])
plt.xlabel("$x_1$")
plt.ylabel("$x_2$")
plt.title("Simulated latent trajectories")

# In[5]: