Example No. 1
    def test_logprob(self):

        y = self._build_placeholder([1.0, 2.5, 4.3, 6.1, 7.8])

        ssm = LocalLinearTrendStateSpaceModel(
            num_timesteps=5,
            level_scale=0.5,
            slope_scale=0.5,
            initial_state_prior=tfd.MultivariateNormalDiag(
                scale_diag=self._build_placeholder([1., 1.])))

        lp = ssm.log_prob(y[..., np.newaxis])
        expected_lp = -5.801624298095703
        self.assertAllClose(self.evaluate(lp), expected_lp)
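Note that log_prob is called on y[..., np.newaxis] rather than y: the state space model's events have shape [num_timesteps, 1] (one scalar observation per timestep), so the flat series needs a trailing size-1 dimension. A minimal NumPy sketch of that reshape:

import numpy as np

y = np.array([1.0, 2.5, 4.3, 6.1, 7.8])
print(y.shape)                   # (5,)
print(y[..., np.newaxis].shape)  # (5, 1), matching event shape [num_timesteps, 1]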
Example No. 2
  def test_joint_sample(self):
    strm = test_util.test_seed_stream()
    batch_shape = [4, 3]

    level_scale = self._build_placeholder(2 * np.ones(batch_shape))
    slope_scale = self._build_placeholder(0.2 * np.ones(batch_shape))
    observation_noise_scale = self._build_placeholder(1.)
    initial_state_prior = tfd.MultivariateNormalDiag(
        loc=self._build_placeholder([-3, 0.5]),
        scale_diag=self._build_placeholder([0.1, 0.2]))

    ssm = LocalLinearTrendStateSpaceModel(
        num_timesteps=10,
        level_scale=level_scale,
        slope_scale=slope_scale,
        observation_noise_scale=observation_noise_scale,
        initial_state_prior=initial_state_prior)

    num_samples = 10000
    sampled_latents, sampled_obs = ssm._joint_sample_n(n=num_samples,
                                                       seed=strm())
    latent_mean, obs_mean = ssm._joint_mean()
    latent_cov, obs_cov = ssm._joint_covariances()
    (sampled_latents_, sampled_obs_,
     latent_mean_, obs_mean_,
     latent_level_std_,
     level_slope_std_,
     obs_std_) = self.evaluate(
         (sampled_latents, sampled_obs,
          latent_mean, obs_mean,
          tf.sqrt(latent_cov[..., 0, 0]),
          tf.sqrt(latent_cov[..., 1, 1]),
          tf.sqrt(obs_cov[..., 0])))
    latent_std_ = np.stack([latent_level_std_, level_slope_std_], axis=-1)

    # Instead of directly comparing means and stddevs, we normalize by stddev
    # to make the stderr constant.
    self.assertAllClose(np.mean(sampled_latents_, axis=0) / latent_std_,
                        latent_mean_ / latent_std_,
                        atol=4. / np.sqrt(num_samples))
    self.assertAllClose(np.mean(sampled_obs_, axis=0) / obs_std_,
                        obs_mean_ / obs_std_,
                        atol=4. / np.sqrt(num_samples))
    self.assertAllClose(np.std(sampled_latents_, axis=0) / latent_std_,
                        np.ones(latent_std_.shape, dtype=latent_std_.dtype),
                        atol=4. / np.sqrt(num_samples))
    self.assertAllClose(np.std(sampled_obs_, axis=0) / obs_std_,
                        np.ones(obs_std_.shape, dtype=obs_std_.dtype),
                        atol=4. / np.sqrt(num_samples))
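The atol used above follows from standard Monte Carlo error analysis: after normalizing by the true stddev, each sample-mean estimate has a standard error of roughly 1/sqrt(num_samples), so 4/sqrt(num_samples) is approximately a four-standard-error tolerance. A minimal sketch of that arithmetic:

import numpy as np

num_samples = 10000
stderr = 1. / np.sqrt(num_samples)  # ~0.01 per normalized (unit-variance) estimate
atol = 4. * stderr                  # ~0.04: roughly a four-standard-error tolerance
print(stderr, atol)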
Example No. 3
  def test_batch_shape(self):
    batch_shape = [4, 2]
    partial_batch_shape = [2]

    level_scale = self._build_placeholder(
        np.exp(np.random.randn(*partial_batch_shape)))
    slope_scale = self._build_placeholder(np.exp(np.random.randn(*batch_shape)))
    initial_state_prior = tfd.MultivariateNormalDiag(
        scale_diag=self._build_placeholder([1., 1.]))

    ssm = LocalLinearTrendStateSpaceModel(
        num_timesteps=10,
        level_scale=level_scale,
        slope_scale=slope_scale,
        initial_state_prior=initial_state_prior)
    self.assertAllEqual(self.evaluate(ssm.batch_shape_tensor()), batch_shape)

    y = ssm.sample()
    self.assertAllEqual(self.evaluate(tf.shape(input=y))[:-2], batch_shape)
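The model batch shape here comes from broadcasting the parameter batch shapes: the partial shape [2] of level_scale broadcasts against the full shape [4, 2] of slope_scale to give [4, 2]. A minimal NumPy sketch of the same broadcast:

import numpy as np

partial = np.empty([2])
full = np.empty([4, 2])
print(np.broadcast(partial, full).shape)  # (4, 2)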
Example No. 4
    def test_matches_locallineartrend(self):
        """SemiLocalLinearTrend with trivial AR process is a LocalLinearTrend."""

        level_scale = self._build_placeholder(0.5)
        slope_scale = self._build_placeholder(0.5)
        initial_level = self._build_placeholder(3.)
        initial_slope = self._build_placeholder(-2.)
        num_timesteps = 5
        y = self._build_placeholder([1.0, 2.5, 4.3, 6.1, 7.8])

        semilocal_ssm = SemiLocalLinearTrendStateSpaceModel(
            num_timesteps=num_timesteps,
            level_scale=level_scale,
            slope_scale=slope_scale,
            slope_mean=self._build_placeholder(0.),
            autoregressive_coef=self._build_placeholder(1.),
            initial_state_prior=tfd.MultivariateNormalDiag(
                loc=[initial_level, initial_slope],
                scale_diag=self._build_placeholder([1., 1.])))

        local_ssm = LocalLinearTrendStateSpaceModel(
            num_timesteps=num_timesteps,
            level_scale=level_scale,
            slope_scale=slope_scale,
            initial_state_prior=tfd.MultivariateNormalDiag(
                loc=[initial_level, initial_slope],
                scale_diag=self._build_placeholder([1., 1.])))

        semilocal_lp = semilocal_ssm.log_prob(y[:, tf.newaxis])
        local_lp = local_ssm.log_prob(y[:, tf.newaxis])
        self.assertAllClose(self.evaluate(semilocal_lp),
                            self.evaluate(local_lp))

        semilocal_mean = semilocal_ssm.mean()
        local_mean = local_ssm.mean()
        self.assertAllClose(self.evaluate(semilocal_mean),
                            self.evaluate(local_mean))

        semilocal_variance = semilocal_ssm.variance()
        local_variance = local_ssm.variance()
        self.assertAllClose(self.evaluate(semilocal_variance),
                            self.evaluate(local_variance))
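The reason the two models agree: in the semi-local model the slope follows an AR(1) recursion of the form slope[t+1] = slope_mean + autoregressive_coef * (slope[t] - slope_mean) + noise, so with slope_mean = 0 and autoregressive_coef = 1 it degenerates to slope[t+1] = slope[t] + noise, exactly the random-walk slope of a local linear trend. A minimal sketch of the two recursions driven by shared noise, assuming that AR(1) parameterization:

import numpy as np

rng = np.random.default_rng(0)
slope_mean, ar_coef, slope_scale = 0., 1., 0.5
slope_semi = slope_local = -2.
for _ in range(5):
    eps = slope_scale * rng.normal()
    slope_semi = slope_mean + ar_coef * (slope_semi - slope_mean) + eps
    slope_local = slope_local + eps
    assert np.isclose(slope_semi, slope_local)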
Example No. 5
  def test_identity(self):

    # Test that an additive SSM with a single component defines the same
    # distribution as the component model.

    y = self._build_placeholder([1.0, 2.5, 4.3, 6.1, 7.8])

    local_ssm = LocalLinearTrendStateSpaceModel(
        num_timesteps=5,
        level_scale=0.3,
        slope_scale=0.6,
        observation_noise_scale=0.1,
        initial_state_prior=tfd.MultivariateNormalDiag(
            scale_diag=self._build_placeholder([1., 1.])))

    additive_ssm = AdditiveStateSpaceModel([local_ssm])

    local_lp = local_ssm.log_prob(y[:, np.newaxis])
    additive_lp = additive_ssm.log_prob(y[:, np.newaxis])
    self.assertAllClose(self.evaluate(local_lp), self.evaluate(additive_lp))
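Since a single-component additive model just wraps the component's latent space and sums a single observation term, the two distributions should agree on more than log_prob. If one wanted to strengthen the test, checks along these lines would be natural (a sketch only, reusing the same test helpers):

    self.assertAllClose(self.evaluate(local_ssm.mean()),
                        self.evaluate(additive_ssm.mean()))
    self.assertAllClose(self.evaluate(local_ssm.variance()),
                        self.evaluate(additive_ssm.variance()))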
Example No. 6
  def test_stats(self):

    # Build a model with expected initial loc 0 and slope 1.
    level_scale = self._build_placeholder(1.0)
    slope_scale = self._build_placeholder(1.0)
    initial_state_prior = tfd.MultivariateNormalDiag(
        loc=self._build_placeholder([0, 1.]),
        scale_diag=self._build_placeholder([1., 1.]))

    ssm = LocalLinearTrendStateSpaceModel(
        num_timesteps=10,
        level_scale=level_scale,
        slope_scale=slope_scale,
        initial_state_prior=initial_state_prior)

    # In expectation, the process grows linearly.
    mean = self.evaluate(ssm.mean())
    self.assertAllClose(mean, np.arange(0, 10)[:, np.newaxis])

    # The slope variance grows linearly (slope_scale**2 per step); the observed
    # level variance accumulates it and so grows cubically in time (the values
    # below are reproduced in the sketch after this example).
    expected_variance = [1, 3, 8, 18, 35, 61, 98, 148, 213, 295]
    variance = self.evaluate(ssm.variance())
    self.assertAllClose(variance, np.array(expected_variance)[:, np.newaxis])
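The expected_variance values can be reproduced by iterating the covariance prediction recursion P[t+1] = F P[t] F^T + Q, where F = [[1, 1], [0, 1]] is the local-linear-trend transition matrix and Q = diag(level_scale**2, slope_scale**2); with zero observation noise, the observed variance at time t is just the level variance P[t][0, 0]. A minimal NumPy sketch that regenerates the list:

import numpy as np

F = np.array([[1., 1.], [0., 1.]])  # level[t+1] = level[t] + slope[t]; slope is a random walk
Q = np.diag([1.**2, 1.**2])         # level_scale = slope_scale = 1
P = np.diag([1., 1.])               # initial_state_prior covariance
variances = []
for _ in range(10):
    variances.append(P[0, 0])       # observed variance = level variance (no observation noise)
    P = F @ P @ F.T + Q
print(variances)  # matches expected_variance: [1, 3, 8, 18, 35, 61, 98, 148, 213, 295]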
Example No. 7
    def test_sum_of_local_linear_trends(self):

        # We know analytically that the sum of two local linear trends is
        # another local linear trend whose initial means add and whose noise
        # variances are the sums of the component variances, so the additive
        # model should match this behavior.

        level_scale = 0.5
        slope_scale = 1.1
        initial_level = 3.
        initial_slope = -2.
        observation_noise_scale = 0.
        num_timesteps = 5
        y = self._build_placeholder([1.0, 2.5, 4.3, 6.1, 7.8])

        # Combine two local linear trend models, one a full model, the other
        # with just a moving mean (zero slope).
        local_ssm = LocalLinearTrendStateSpaceModel(
            num_timesteps=num_timesteps,
            level_scale=level_scale,
            slope_scale=slope_scale,
            initial_state_prior=tfd.MultivariateNormalDiag(
                loc=self._build_placeholder([initial_level, initial_slope]),
                scale_diag=self._build_placeholder([1., 1.])))

        second_level_scale = 0.3
        second_initial_level = 1.1
        moving_level_ssm = LocalLinearTrendStateSpaceModel(
            num_timesteps=num_timesteps,
            level_scale=second_level_scale,
            slope_scale=0.,
            initial_state_prior=tfd.MultivariateNormalDiag(
                loc=self._build_placeholder([second_initial_level, 0.]),
                scale_diag=self._build_placeholder([1., 0.])))

        additive_ssm = AdditiveStateSpaceModel(
            [local_ssm, moving_level_ssm],
            observation_noise_scale=observation_noise_scale)

        # Build the analytical sum of the two processes.
        target_ssm = LocalLinearTrendStateSpaceModel(
            num_timesteps=num_timesteps,
            level_scale=np.float32(
                np.sqrt(level_scale**2 + second_level_scale**2)),
            slope_scale=np.float32(slope_scale),
            observation_noise_scale=observation_noise_scale,
            initial_state_prior=tfd.MultivariateNormalDiag(
                loc=self._build_placeholder(
                    [initial_level + second_initial_level,
                     initial_slope + 0.]),
                scale_diag=self._build_placeholder(np.sqrt([2., 1.]))))

        # Test that both models behave equivalently.
        additive_mean = additive_ssm.mean()
        target_mean = target_ssm.mean()
        self.assertAllClose(self.evaluate(additive_mean),
                            self.evaluate(target_mean))

        additive_variance = additive_ssm.variance()
        target_variance = target_ssm.variance()
        self.assertAllClose(self.evaluate(additive_variance),
                            self.evaluate(target_variance))

        additive_lp = additive_ssm.log_prob(y[:, np.newaxis])
        target_lp = target_ssm.log_prob(y[:, np.newaxis])
        self.assertAllClose(self.evaluate(additive_lp),
                            self.evaluate(target_lp))
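The target model's parameters follow from summing independent Gaussian noise terms: both components perturb the level, so the combined level scale is sqrt(0.5**2 + 0.3**2) ≈ 0.583; only the first component's slope is stochastic, so the combined slope scale stays 1.1; and the initial level priors (unit variance each) add to variance 2 while the second slope is pinned at 0, giving scale_diag = sqrt([2., 1.]). A minimal arithmetic sketch:

import numpy as np

level_scale, second_level_scale = 0.5, 0.3
print(np.sqrt(level_scale**2 + second_level_scale**2))  # ~0.583: combined level_scale
print(np.sqrt([1.**2 + 1.**2,    # initial level prior variances add -> sqrt(2)
               1.**2 + 0.**2]))  # only the first slope prior is stochastic -> 1.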