Example #1
def test_state_space_normal_diag():
    """Tester for correctness of StateSpaceNormalDiag transformation."""

    time = 5
    in_dim = 3
    out_dim = 2
    n_sample = 100

    tot_dist = 1
    input_ = np.random.rand(tot_dist, time, in_dim)

    with tf.Graph().as_default():
        input_tensor = tf.constant(input_)
        d_1 = ReparameterizedDistribution(out_dim=(time, out_dim),
                                          in_dim=(time, in_dim),
                                          distribution=StateSpaceNormalDiag,
                                          transform=MultiLayerPerceptron,
                                          reparam_scale=True,
                                          hidden_units=[20, 20])
        s_1 = d_1.sample(n_samples=n_sample, y=input_tensor)
        with tf.Session() as sess:
            sess.run(tf.global_variables_initializer())
            tf_res = sess.run(s_1)

    expected_shape = (n_sample, tot_dist, time, out_dim)
    print("Testing the shape of the StateSpaceNormalDiag samples.")
    assert tf_res.shape == expected_shape, "sample shape not correct."
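
A hedged extension of the test above (reusing tf_res and the numpy import the snippet already assumes): beyond the shape, the samples can also be checked for finiteness.

    # Hedged extra check, not from the original test: samples should be finite.
    print("Testing that the StateSpaceNormalDiag samples are finite.")
    assert np.all(np.isfinite(tf_res)), "samples contain NaN or Inf."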
Example #2
    def __init__(self,
                 lat_dim,
                 obs_dim,
                 time_steps,
                 transition_units,
                 emission_layers,
                 poisson=False,
                 binary=False):
        """Sets up the parameters of the Kalman filter sets up super class.

        params:
        -------
        lat_dim: int
        obs_dim: int
        time_steps: int
        transition_units: int
            Number of hidden units for the gated transition transform in DKF.
        poisson: bool
            True if observation is count data. Otherwise, observation is
            continuous.
        """
        q_init = tf.Variable(np.ones(lat_dim))
        dist = tf.contrib.distributions.MultivariateNormalDiag

        em_dist = dist
        if poisson:
            em_dist = MultiPoisson
        elif binary:
            em_dist = MultiBernoulli

        # Prior distribution for initial point
        prior = dist(np.zeros(lat_dim), q_init)
        # Transition model.
        trans_model = ReparameterizedDistribution(
            out_dim=lat_dim,
            in_dim=lat_dim,
            transform=GatedTransition,
            distribution=dist,
            reparam_scale=True,
            hidden_units=transition_units)
        # Emission model is reparameterized Gaussian with linear
        # transformation.
        emission_model = ReparameterizedDistribution(
            out_dim=obs_dim,
            in_dim=lat_dim,
            transform=MLP,
            distribution=em_dist,
            reparam_scale=False,
            hidden_units=emission_layers)

        super(DeepKalmanDynamics, self).__init__(init_model=prior,
                                                 transition_model=trans_model,
                                                 emission_model=emission_model,
                                                 time_steps=time_steps)
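
A minimal usage sketch for the constructor above; the argument values are illustrative assumptions, and the repo's own imports are presumed to be in scope.

# Illustrative values only; not from the source repository.
dkf = DeepKalmanDynamics(lat_dim=2,
                         obs_dim=10,
                         time_steps=50,
                         transition_units=32,
                         emission_layers=[64, 64],
                         poisson=True)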
Example #3
    def __init__(self, state_dim, trans_hidden_units, time_steps):
        """Sets up the necessary networks for the markov dynamics.

        params:
        -------
        state_dim: int
        obs_dim: int
        trans_hidden_units: list of int
        """
        self.time_steps = time_steps
        # dimension of each state space
        self.state_dim = state_dim
        self.dist_type = tf.contrib.distributions.MultivariateNormalDiag

        self.init_model = self.dist_type(
            tf.Variable(np.zeros(self.state_dim)),
            tf.nn.softplus(tf.Variable(np.ones(self.state_dim))))
        # List of transformation per each step
        self.time_transition_model = []
        for time in range(self.time_steps - 1):
            self.time_transition_model.append(
                ReparameterizedDistribution(out_dim=self.state_dim,
                                            in_dim=self.state_dim,
                                            transform=MLP,
                                            distribution=self.dist_type,
                                            hidden_units=trans_hidden_units))
        super(TimeVariantDynamics,
              self).__init__(out_dim=self.state_dim * time_steps)
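
A minimal usage sketch with illustrative dimensions only.

# Illustrative values only.
dynamics = TimeVariantDynamics(state_dim=3,
                               trans_hidden_units=[20, 20],
                               time_steps=10)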
Example #4
    def __init__(self,
                 lat_dim,
                 obs_dim,
                 time_steps,
                 nonlinear_transform,
                 init_transition_matrix_bias=None,
                 poisson=False,
                 binary=False,
                 full_covariance=True,
                 order=1,
                 **kwargs):
        """Sets up the parameters of the Kalman filter sets up super class.

        params:
        -------
        lat_dim: int
        obs_dim: int
        time_steps: int
        nonlinear_transform: transform.Transform type
        init_transition_matrix_bias: np.ndarray shape (lat_dim + 1, lat_dim)
            Initial value for the transition matrix.
        poisson: bool
            If False the imission distribution is Gaussian, if not the emission
            distribution is poisson.
        full_covariance: bool
            Covariance matrices are full if True, otherwise, diagonal.
        """

        if full_covariance:
            dist = tf.contrib.distributions.MultivariateNormalTriL
        else:
            dist = tf.contrib.distributions.MultivariateNormalDiag

        # Emission distribution defaults to the Gaussian family; count and
        # binary observations override it.
        em_dist = dist
        if poisson:
            em_dist = MultiPoisson
        elif binary:
            em_dist = MultiBernoulli

        # Emission model is reparameterized Gaussian with linear
        # transformation.
        emission_model = ReparameterizedDistribution(
            out_dim=obs_dim,
            in_dim=lat_dim,
            transform=nonlinear_transform,
            distribution=em_dist,
            reparam_scale=False,
            **kwargs)

        super(FLDS, self).__init__(
            lat_dim=lat_dim,
            obs_dim=obs_dim,
            time_steps=time_steps,
            emission_model=emission_model,
            init_transition_matrix_bias=init_transition_matrix_bias,
            full_covariance=full_covariance,
            order=order)
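
A hedged usage sketch; MLP as the nonlinear transform and the hidden_units keyword follow the conventions of the other snippets here, and all values are illustrative.

# Illustrative values only; hidden_units is forwarded to the MLP transform
# through **kwargs.
flds = FLDS(lat_dim=2,
            obs_dim=15,
            time_steps=30,
            nonlinear_transform=MLP,
            poisson=True,
            full_covariance=False,
            hidden_units=[32, 32])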
Example #5
    def __init__(self,
                 lat_dim,
                 obs_dim,
                 time_steps,
                 init_transition_matrix_bias=None,
                 full_covariance=True,
                 order=1):
        """Sets up the parameters of the Kalman filter sets up super class.

        params:
        -------
        lat_dim: int
        obs_dim: int
        time_steps: int
        init_transition_matrix_bias: np.ndarray shape (lat_dim + 1, lat_dim)
            Initial value for the transition matrix.
        full_covariance: bool
            Covariance matrices are full if True, otherwise, diagonal.
        """

        if full_covariance:
            dist = tf.contrib.distributions.MultivariateNormalTriL
        else:
            dist = tf.contrib.distributions.MultivariateNormalDiag

        self.emission_matrix = tf.Variable(
            np.random.normal(0, 1, [lat_dim + 1, obs_dim]))
        # Emission model is reparameterized Gaussian with linear
        # transformation.
        emission_model = ReparameterizedDistribution(
            out_dim=obs_dim,
            in_dim=lat_dim,
            transform=LinearTransform,
            distribution=dist,
            reparam_scale=False,
            gov_param=self.emission_matrix)

        super(KalmanFilter, self).__init__(
            lat_dim=lat_dim,
            obs_dim=obs_dim,
            time_steps=time_steps,
            emission_model=emission_model,
            init_transition_matrix_bias=init_transition_matrix_bias,
            full_covariance=full_covariance,
            order=order)
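
A minimal usage sketch with illustrative sizes; the transition model is set up by the super class shown in Example #8.

# Illustrative values only.
kf = KalmanFilter(lat_dim=2,
                  obs_dim=5,
                  time_steps=20,
                  full_covariance=False)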
Example #6
def test_reparam_gaussian_full_covar():
    """Tester for correctness of linear transformation."""

    in_dim = 3
    out_dim = 2
    n_sample = 1000

    tot_dist = 2
    input_ = np.random.rand(tot_dist, in_dim)
    dist = tf.contrib.distributions.MultivariateNormalTriL

    with tf.Graph().as_default():
        input_tensor = tf.constant(input_)
        d_1 = ReparameterizedDistribution(out_dim=out_dim,
                                          in_dim=in_dim,
                                          distribution=dist,
                                          transform=LinearTransform,
                                          reparam_scale=True)
        d_2 = ReparameterizedDistribution(out_dim=out_dim,
                                          in_dim=in_dim,
                                          distribution=dist,
                                          transform=LinearTransform,
                                          reparam_scale=False)
        s_1 = d_1.sample(n_samples=n_sample, y=input_tensor)
        # Repeat for testing sampling multiple times.
        s_1 = d_1.sample(n_samples=n_sample, y=input_tensor)
        s_2 = d_2.sample(n_samples=n_sample, y=input_tensor)
        s_2 = d_2.sample(n_samples=n_sample, y=input_tensor)

        cov_2 = d_2.get_distribution(y=input_tensor).covariance()
        with tf.Session() as sess:
            sess.run(tf.global_variables_initializer())
            tf_res = sess.run([s_1, s_2])
            tf_cov = sess.run([cov_2])

    expected_shape = (n_sample, tot_dist, out_dim)
    print("Testing the shape of the samples.")
    for i in range(2):
        assert tf_res[i].shape == expected_shape, "sample shape not correct."
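
A hedged follow-up using the analytic covariance fetched above (tf_cov); it prints a deviation figure rather than asserting, since reasonable tolerances depend on the randomly initialized transform, and it assumes covariance() returns a (tot_dist, out_dim, out_dim) batch.

    # Sanity check only (assumption: tf_cov[0] has shape
    # (tot_dist, out_dim, out_dim)); compare against the empirical covariance.
    emp_cov = np.cov(tf_res[1][:, 0, :], rowvar=False)
    print("Max abs deviation from analytic covariance:",
          np.max(np.abs(emp_cov - tf_cov[0][0])))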
Example #7
    def __init__(self,
                 lat_dim,
                 obs_dim,
                 time_steps,
                 transition_layers,
                 emission_transform,
                 poisson=False,
                 binary=False,
                 residual=False,
                 full_covariance=False,
                 order=1,
                 **kwargs):
        """Sets up the parameters of the Kalman filter sets up super class.

        params:
        -------
        lat_dim: int
        obs_dim: int
        time_steps: int
        transition_layers: list of int
        nonlinear_transform: transform.Transform type
        poisson: bool
            True if observation is count data. Otherwise, observation is
            continuous.
        residual: bool
            If True MLP transition function is of residual form.
        full_covariance: bool
            Covariance matrices of noise processes are full if True. otherwise,
            diagonal covariance.
        """
        self.full_covariance = full_covariance
        if full_covariance:
            q_init = tf.Variable(np.eye(lat_dim * order))
            dist = tf.contrib.distributions.MultivariateNormalTriL
        else:
            q_init = tf.Variable(np.ones(lat_dim * order))
            dist = tf.contrib.distributions.MultivariateNormalDiag

        em_dist = dist
        if poisson:
            em_dist = MultiPoisson
        elif binary:
            em_dist = MultiBernoulli

        # Prior distribution for initial point
        prior = dist(np.zeros(lat_dim * order), q_init)
        # Transition model.
        trans_model = ReparameterizedDistribution(
            out_dim=lat_dim,
            in_dim=lat_dim * order,
            transform=MLP,
            distribution=dist,
            reparam_scale=False,
            hidden_units=transition_layers,
            residual=residual)
        # Emission model is reparameterized Gaussian with linear
        # transformation.
        emission_model = ReparameterizedDistribution(
            out_dim=obs_dim,
            in_dim=lat_dim,
            transform=emission_transform,
            distribution=em_dist,
            reparam_scale=False,
            **kwargs)

        super(MLPDynamics, self).__init__(init_model=prior,
                                          transition_model=trans_model,
                                          emission_model=emission_model,
                                          time_steps=time_steps,
                                          order=order)
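
A hedged usage sketch; the emission transform and layer sizes are illustrative assumptions.

# Illustrative values only; hidden_units is forwarded to the emission
# transform through **kwargs.
dynamics = MLPDynamics(lat_dim=3,
                       obs_dim=20,
                       time_steps=40,
                       transition_layers=[32, 32],
                       emission_transform=MLP,
                       binary=True,
                       hidden_units=[64])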
Example #8
    def __init__(self,
                 lat_dim,
                 obs_dim,
                 time_steps,
                 emission_model,
                 init_transition_matrix_bias=None,
                 full_covariance=True,
                 order=1):
        """Sets up the parameters of the Kalman filter sets up super class.

        params:
        -------
        lat_dim: int
        obs_dim: int
        time_steps: int
        emission_model: model
            Describing the relation of the model.
        init_transition_matrix_bias: np.ndarray shape (lat_dim + 1, lat_dim)
            Initial value for the transition matrix.
        full_covariance: True
        """
        self.lat_dim = lat_dim
        self.obs_dim = obs_dim

        self.full_covariance = full_covariance
        # TODO: determine what the initial point is.
        # mean_0 = np.random.normal(0, 1, lat_dim * order)
        mean_0 = np.zeros(lat_dim * order)
        if full_covariance:
            dist = tf.contrib.distributions.MultivariateNormalTriL
            cov_0 = tf.Variable(np.eye(lat_dim * order))
        else:
            dist = tf.contrib.distributions.MultivariateNormalDiag
            cov_0 = tf.nn.softplus(tf.Variable(np.ones(lat_dim * order)))

        # Transition matrix for the linear function.
        if init_transition_matrix_bias is None:
            init_ = np.concatenate([np.eye(lat_dim) for i in range(order)],
                                   axis=0)
            init_ = np.concatenate([init_, np.zeros([1, lat_dim])])
            self.transition_matrix = tf.Variable(init_)
        else:
            self.transition_matrix = tf.Variable(init_transition_matrix_bias)
        prior = dist(mean_0, cov_0)

        # Covariance Matrix cholesky factor of evolution.
        cov_t = None
        if full_covariance:
            cov_t = tf.Variable(np.eye(lat_dim))
        else:
            cov_t = tf.Variable(np.zeros(lat_dim))
        # Transition model
        transition_model = ReparameterizedDistribution(
            out_dim=lat_dim,
            in_dim=lat_dim * order,
            transform=LinearTransform,
            distribution=dist,
            reparam_scale=cov_t,
            has_bias=False,
            gov_param=self.transition_matrix)

        # Covariance parameters are cholesky factors, so multiply to get the
        # covariances.
        q_factor = transition_model.scale_param
        self.q_init = None
        self.q_matrix = cov_t
        if not full_covariance:
            self.q_init = tf.diag(cov_0)
            self.q_matrix = tf.diag(tf.nn.softplus(cov_t))
        else:
            self.q_init = tf.matmul(cov_0, cov_0, transpose_b=True)
            self.q_matrix = tf.matmul(cov_t, cov_t, transpose_b=True)
        self.a_matrix = self.transition_matrix[:lat_dim]

        super(LatentLinearDynamicalSystem,
              self).__init__(init_model=prior,
                             transition_model=transition_model,
                             emission_model=emission_model,
                             time_steps=time_steps,
                             order=order)
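
A hedged sketch of constructing the base class directly, reusing the linear emission construction from Example #5; sizes are illustrative and the repo's imports are assumed to be in scope.

# Illustrative values only; the emission matrix mirrors Example #5.
lat_dim, obs_dim = 2, 5
emission = ReparameterizedDistribution(
    out_dim=obs_dim,
    in_dim=lat_dim,
    transform=LinearTransform,
    distribution=tf.contrib.distributions.MultivariateNormalDiag,
    reparam_scale=False,
    gov_param=tf.Variable(np.random.normal(0, 1, [lat_dim + 1, obs_dim])))
llds = LatentLinearDynamicalSystem(lat_dim=lat_dim,
                                   obs_dim=obs_dim,
                                   time_steps=25,
                                   emission_model=emission,
                                   full_covariance=False)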