Example #1
    def predict_z(self, num_samples: B.Int = 1000):
        """Predict Fourier features.

        Args:
            num_samples (int, optional): Number of samples to use. Defaults to `1000`.

        Returns:
            tuple[vector, vector]: Mean and variance of the predictions.
        """
        zs = [B.flatten(x) for x in self._sample_p_z(num_samples)]
        m1 = B.mean(B.stack(*zs, axis=0), axis=0)
        m2 = B.mean(B.stack(*[z**2 for z in zs], axis=0), axis=0)
        return m1, m2 - m1**2
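The pattern above is plain Monte Carlo moment matching: average the samples for the first moment, average the squared samples for the second, and recover the variance as E[z^2] - (E[z])^2. A minimal, self-contained NumPy sketch of that computation (array names and sizes are illustrative, not from the library):

import numpy as np

# Pretend each row is one flattened sample of the Fourier features.
rng = np.random.default_rng(0)
zs = rng.normal(loc=2.0, scale=0.5, size=(1000, 8))

m1 = zs.mean(axis=0)         # Estimate of E[z] per feature.
m2 = (zs ** 2).mean(axis=0)  # Estimate of E[z^2] per feature.
var = m2 - m1 ** 2           # Var[z] = E[z^2] - (E[z])^2.
print(m1.shape, var.shape)   # (8,) (8,)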
Example #2
    def sample_kernel(self, t_k, num_samples: B.Int = 1000):
        """Sample kernel under the mean-field approximation.

        Args:
            t_k (vector): Time points to sample at.
            num_samples (int, optional): Number of samples to use. Defaults to `1000`.

        Returns:
            tensor: Samples.
        """
        us = self.p_u.sample(num_samples)
        sample_kernel = B.jit(self._sample_kernel)
        return B.stack(*[sample_kernel(t_k, u) for u in _columns(us)], axis=0)
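The method draws all posterior samples of u in one call (one sample per column), jit-compiles the per-sample kernel evaluation once, and stacks the per-sample results along a new leading axis. A rough NumPy sketch of that column-wise loop, with a toy function standing in for `self._sample_kernel` (every name below is illustrative):

import numpy as np

def sample_kernel_one(t_k, u):
    # Stand-in for the per-sample kernel evaluation.
    return np.exp(-0.5 * t_k ** 2) * np.sum(u ** 2)

t_k = np.linspace(0, 3, 50)
us = np.random.default_rng(1).normal(size=(10, 200))  # One sample of u per column.

# Evaluate the kernel for every column of `us` and stack along a new axis.
ks = np.stack([sample_kernel_one(t_k, us[:, i]) for i in range(us.shape[1])], axis=0)
print(ks.shape)  # (200, 50)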
Example #3
    def sample_kernel(self, t_k, num_samples: B.Int = 1000):
        """Sample kernel.

        Args:
            t_k (vector): Time points to sample at.
            num_samples (int, optional): Number of samples to use. Defaults to `1000`.

        Returns:
            tensor: Samples.
        """
        us = self._sample_p_u(num_samples)
        sample_kernel = B.jit(self._sample_kernel)
        return B.stack(*[sample_kernel(t_k, u) for u in us], axis=0)
Example #4
    def predict(self, t, num_samples: B.Int = 1000):
        """Predict.

        Args:
            t (vector): Points to predict at.
            num_samples (int, optional): Number of samples to use. Defaults to `1000`.

        Returns:
            tuple: Tuple containing the mean and variance of the
                predictions.
        """
        ts = self.construct_terms(t)

        @B.jit
        def predict_moments(u):
            q_z = self.q_z_optimal(self.ts, u)
            return self._predict_moments(ts, u, B.outer(u), q_z.mean, q_z.m2)

        m1s, m2s = zip(
            *[predict_moments(u) for u in self._sample_p_u(num_samples)])
        m1 = B.mean(B.stack(*m1s, axis=0), axis=0)
        m2 = B.mean(B.stack(*m2s, axis=0), axis=0)
        # Don't forget to add in the observation noise!
        return m1, m2 - m1**2 + self.model.noise
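As in the earlier examples, the predictive mean and variance come from averaging per-sample moments, but here the observation-noise variance is added at the end, so the returned marginals describe noisy observations rather than the latent function. A small NumPy sketch of that final combination (all numbers are made up):

import numpy as np

rng = np.random.default_rng(2)
m1s = rng.normal(size=(1000, 20))                      # Per-sample predictive means.
m2s = m1s ** 2 + 0.1 + 0.05 * rng.random((1000, 20))   # Per-sample second moments.
noise = 0.01                                           # Observation-noise variance.

mean = m1s.mean(axis=0)
var = m2s.mean(axis=0) - mean ** 2 + noise             # Latent variance plus noise.
print(mean.shape, var.shape)                           # (20,) (20,)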
Example #5
    def predict_filter(self, t_h=None, num_samples=1000, min_phase=True):
        """Predict the learned filter.

        Args:
            t_h (vector, optional): Inputs to sample the filter at. Will be
                automatically determined if not given.
            num_samples (int, optional): Number of samples to use. Defaults to `1000`.
            min_phase (bool, optional): Predict a minimum-phase version of the filter.
                Defaults to `True`.

        Returns:
            :class:`collections.namedtuple`: Predictions.
        """
        if t_h is None:
            t_h = B.linspace(self.dtype, -self.extent, self.extent, 601)

        @B.jit
        def sample_h(state):
            state, u = self.approximation.p_u.sample(state)
            u = B.mm(self.K_u, u)  # Transform :math:`\hat u` into :math:`u`.
            h = GP(self.k_h())
            h = h | (h(self.t_u), u)  # Condition on sample.
            state, h = h(t_h).sample(state)  # Sample at desired points.
            return state, B.flatten(h)

        # Perform sampling.
        state = B.global_random_state(self.dtype)
        samples = []
        for _ in range(num_samples):
            state, h = sample_h(state)

            # Transform sample according to specification.
            if min_phase:
                h = transform_min_phase(h)

            samples.append(h)
        B.set_global_random_state(state)

        if min_phase:
            # Start at zero.
            t_h = t_h - t_h[0]
        return summarise_samples(t_h, B.stack(*samples, axis=0))
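Inside `sample_h`, the filter is reconstructed by conditioning a GP prior on a posterior sample `u` at the inducing inputs `self.t_u` and then sampling the conditioned process at `t_h`. Below is a minimal NumPy sketch of that conditioning-and-sampling step, with a toy exponentiated-quadratic kernel standing in for `self.k_h()` (all names, sizes, and length scales are illustrative):

import numpy as np

def eq_kernel(x, y, scale=0.5):
    # Toy stand-in for the learned filter kernel.
    return np.exp(-0.5 * (x[:, None] - y[None, :]) ** 2 / scale ** 2)

rng = np.random.default_rng(3)
t_u = np.linspace(-1, 1, 10)    # Inducing inputs.
t_h = np.linspace(-2, 2, 101)   # Points to sample the filter at.
u = rng.normal(size=10)         # Pretend posterior sample of the filter at t_u.

# Condition the GP prior on (t_u, u), then draw one sample at t_h.
K_uu = eq_kernel(t_u, t_u) + 1e-8 * np.eye(10)
K_hu = eq_kernel(t_h, t_u)
mean = K_hu @ np.linalg.solve(K_uu, u)
cov = eq_kernel(t_h, t_h) - K_hu @ np.linalg.solve(K_uu, K_hu.T)
chol = np.linalg.cholesky(cov + 1e-6 * np.eye(101))
h = mean + chol @ rng.normal(size=101)
print(h.shape)  # (101,)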
Example #6
    def predict_psd(self, t_k=None, num_samples=1000):
        """Predict the PSD in dB.

        Args:
            t_k (vector, optional): Inputs to sample kernel at. Will be automatically
                determined if not given.
            num_samples (int, optional): Number of samples to use. Defaults to `1000`.

        Returns:
            :class:`collections.namedtuple`: Predictions.
        """
        if t_k is None:
            t_k = B.linspace(self.dtype, 0, 2 * self.extent, 1000)
        t_k, ks = self.sample_kernel(t_k, num_samples=num_samples)

        # Estimate PSDs.
        freqs, psds = zip(*[estimate_psd(t_k, k, db=False) for k in ks])
        freqs = freqs[0]
        psds = B.stack(*psds, axis=0)

        return summarise_samples(freqs, psds, db=True)
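`estimate_psd` itself is not shown here, but the underlying idea is the Wiener-Khinchin relation: the PSD is the Fourier transform of the autocovariance (kernel), which is then summarised on a decibel scale. A rough, self-contained NumPy sketch of such an estimate for a toy Gaussian kernel (this is not the library's `estimate_psd`):

import numpy as np

t_k = np.linspace(0, 4, 1000)   # Kernel inputs (lags), uniformly spaced.
k = np.exp(-0.5 * t_k ** 2)     # Toy kernel (autocovariance) values.

# Symmetrise around zero lag and take a real FFT (Wiener-Khinchin).
k_sym = np.concatenate([k, k[-2:0:-1]])
dt = t_k[1] - t_k[0]
psd = np.abs(np.fft.rfft(k_sym)) * dt
freqs = np.fft.rfftfreq(len(k_sym), d=dt)

psd_db = 10 * np.log10(psd + 1e-30)  # Convert to dB.
print(freqs.shape, psd_db.shape)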
Example #7
    def q_z_samples(self, samples: list):
        """Set the samples of `q(z)`.

        Args:
            samples (list): Samples to set.
        """
        # We need a setter, because these won't be trainable through gradients.
        self.model.ps.q_z[self._q_i].samples.delete()
        samples = B.stack(*[B.flatten(x) for x in samples], axis=1)
        self.model.ps.q_z[self._q_i].samples.unbounded(init=samples, visible=False)