Example #1
    def compute_lds(
        self,
        F,
        feat_static_cat: Tensor,
        seasonal_indicators: Tensor,
        time_feat: Tensor,
        length: int,
        prior_mean: Optional[Tensor] = None,
        prior_cov: Optional[Tensor] = None,
        lstm_begin_state: Optional[List[Tensor]] = None,
    ):
        # embed categorical features and expand along time axis
        embedded_cat = self.embedder(feat_static_cat)
        repeated_static_features = embedded_cat.expand_dims(axis=1).repeat(
            axis=1, repeats=length
        )

        # construct big features tensor (context)
        features = F.concat(time_feat, repeated_static_features, dim=2)

        output, lstm_final_state = self.lstm.unroll(
            inputs=features,
            begin_state=lstm_begin_state,
            length=length,
            merge_outputs=True,
        )

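        # If no prior over the initial latent state is provided, parameterize
        # it from the LSTM output at the first time step.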
        if prior_mean is None:
            prior_input = F.slice_axis(
                output, axis=1, begin=0, end=1
            ).squeeze(axis=1)

            prior_mean = self.prior_mean_model(prior_input)
            prior_cov_diag = self.prior_cov_diag_model(prior_input)
            prior_cov = make_nd_diag(F, prior_cov_diag, self.issm.latent_dim())

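        # Time-varying ISSM coefficients (emission, transition, innovation),
        # which may depend on the seasonal indicators.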
        emission_coeff, transition_coeff, innovation_coeff = self.issm.get_issm_coeff(
            seasonal_indicators
        )

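        # Project the LSTM output onto the remaining LDS parameters:
        # observation noise scale, innovation strength, and residuals.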
        noise_std, innovation, residuals = self.lds_proj(output)

        lds = LDS(
            emission_coeff=emission_coeff,
            transition_coeff=transition_coeff,
            innovation_coeff=F.broadcast_mul(innovation, innovation_coeff),
            noise_std=noise_std,
            residuals=residuals,
            prior_mean=prior_mean,
            prior_cov=prior_cov,
            latent_dim=self.issm.latent_dim(),
            output_dim=1,
            seq_length=length,
        )

        return lds, lstm_final_state
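
A minimal sketch of how compute_lds might be wired into a training step. The names net, feat_static_cat, past_seasonal_indicators, past_time_feat, past_target, and context_length are illustrative assumptions, not part of the snippet above; the idea is simply to build the LDS over the context window and maximize its marginal log-likelihood.

import mxnet as mx

# Hypothetical training-step usage; `net` is assumed to be an instance of the
# class that defines compute_lds above, and the remaining names are assumed
# batch inputs.
lds, lstm_state = net.compute_lds(
    F=mx.nd,
    feat_static_cat=feat_static_cat,
    seasonal_indicators=past_seasonal_indicators,
    time_feat=past_time_feat,
    length=context_length,
)

# As in Example #2 below, log_prob returns the per-time-step log-likelihood
# along with the final latent state mean and covariance.
log_likelihood, final_mean, final_cov = lds.log_prob(past_target)

# Negative mean log-likelihood over the batch as the training loss.
loss = -log_likelihood.sum(axis=1).mean()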
Example #2
def test_lds_likelihood(data_filename):
    """
    Test to check that likelihood is correctly computed for different
    innovation state space models (ISSM).
    Note that ISSM is a special case of LDS.
    """
    with gzip.GzipFile(data_filename, 'r') as fp:
        data = json.load(fp=fp)

    lds = LDS(
        mx.nd.array(data["emission_coeff"]),
        mx.nd.array(data["transition_coeff"]),
        mx.nd.array(data["innovation_coeff"]),
        mx.nd.array(data["noise_std"]),
        mx.nd.array(data["residuals"]),
        mx.nd.array(data["prior_mean"]),
        mx.nd.array(data["prior_covariance"]),
        data["latent_dim"],
        data["output_dim"],
        data["seq_length"],
    )

    targets = mx.nd.array(data["targets"])
    true_likelihood = mx.nd.array(data["true_likelihood"])

    batch_size = lds.emission_coeff[0].shape[0]
    time_length = len(lds.emission_coeff)
    output_dim = lds.emission_coeff[0].shape[1]
    latent_dim = lds.emission_coeff[0].shape[2]

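    # log_prob returns the per-time-step log-likelihood together with the
    # mean and covariance of the final latent state.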
    likelihood, final_mean, final_cov = lds.log_prob(targets)

    assert_shape_and_finite(likelihood, shape=(batch_size, time_length))
    assert_shape_and_finite(final_mean, shape=(batch_size, latent_dim))
    assert_shape_and_finite(final_cov,
                            shape=(batch_size, latent_dim, latent_dim))

    likelihood_per_item = likelihood.sum(axis=1)

    np.testing.assert_almost_equal(
        likelihood_per_item.asnumpy(),
        true_likelihood.asnumpy(),
        decimal=2,
        err_msg=f"Likelihood did not match: \n "
        f"true likelihood = {true_likelihood},\n"
        f"obtained likelihood = {likelihood_per_item}",
    )

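    # Draw 100 sample paths; the leading axis is the number of samples.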
    samples = lds.sample(num_samples=100)

    assert_shape_and_finite(samples,
                            shape=(100, batch_size, time_length, output_dim))

    sample = lds.sample()

    assert_shape_and_finite(sample, lds.batch_shape + lds.event_shape)

    ll, _, _ = lds.log_prob(sample)

    assert_shape_and_finite(ll, lds.batch_shape)
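
The helper assert_shape_and_finite is not shown in this snippet; a minimal version consistent with how it is called above might look like the following (its actual definition in the test suite may differ).

import numpy as np

def assert_shape_and_finite(tensor, shape) -> None:
    # Hypothetical helper: assert the exact shape and that every entry is finite.
    assert tensor.shape == tuple(shape), (
        f"expected shape {tuple(shape)}, got {tensor.shape}"
    )
    assert np.isfinite(tensor.asnumpy()).all(), "tensor contains NaN or Inf"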