Example no. 1
0
    def optimize_step(self, k_params, n_params, update=True):
        """
        Runs one step of optimization.

        Args:
            k_params (list): per-dimension optimizer state for kernel
                hyperparameter updates; entries may be None on the first call
            n_params: optimizer state for the observation-noise update
            update (bool): whether to re-solve the linear system after the step

        Returns:
            tuple: updated (k_params, n_params, loss), where loss is the
            negative marginal likelihood after the step
        """
        # Lazily initialize optimization state on the first call.
        if self.opt_idx is None:
            self.opt_idx = list(range(self.d))
        if self.optimizer is None:
            self.optimizer = SGD()
        if self.kernel_grads is None:
            # Cache (d eval / d params, d log_prior / d params) per kernel.
            self.kernel_grads = []
            for i in self.opt_idx:
                self.kernel_grads.append((jacobian(self.kernels[i].eval),
                                          jacobian(self.kernels[i].log_prior)))

        # Optimizing kernel hyperparameters, one dimension at a time.
        for i, d in enumerate(self.opt_idx):
            # Gradients are clipped elementwise to keep the step stable.
            grad_kern_marginal = np.clip(self.grad_marginal_k(i, d),
                                         -self.max_grad, self.max_grad)
            grad_kern_penalty = np.clip(self.grad_penalty_k(i, d),
                                        -self.max_grad, self.max_grad)
            grad_kern = grad_kern_marginal - grad_kern_penalty
            # BUG FIX: the original built k_d_params via a no-op conditional
            # (`x if x is not None else None`); update k_params[i] directly.
            self.kernels[d].params, k_params[i] = \
                self.optimizer.step((self.kernels[d].params, grad_kern),
                                    k_params[i])

        # Optimizing observation noise in unconstrained (inverse-softplus)
        # space; expit(noise_trans) is the chain-rule factor d softplus / dx.
        noise_trans = inv_softplus(self.noise)
        grad_noise = np.clip(self.grad_marginal_noise(), -self.max_grad,
                             self.max_grad)
        grad_noise_trans = expit(noise_trans) * grad_noise
        noise_trans, n_params = \
            self.optimizer.step((noise_trans, grad_noise_trans),
                                n_params)
        self.noise = softplus(noise_trans)
        # (removed a no-op self-assignment `n_params = n_params` here)

        # Updating kernel matrices and calculating loss
        # (negative marginal likelihood).
        self.construct_Ks()
        loss = -self.marginal()
        if update:
            self.solve()
        return k_params, n_params, loss
Example no. 2
0
    def pack_params(self, lengthscale, variance):
        """Pack (lengthscale, variance) into one unconstrained vector.

        The inverse-softplus maps the positive hyperparameters into
        unconstrained space for optimization.
        """
        raw = np.array([lengthscale, variance])
        return inv_softplus(raw)
Example no. 3
0
 def pack_params(self, w, mu, sigma):
     """Concatenate w, mu, sigma and map through inverse-softplus.

     Returns a single flat parameter vector in unconstrained space.
     """
     stacked = np.hstack([w, mu, sigma])
     return inv_softplus(stacked)
Example no. 4
0
    def pack_params(self, lengthscale, variance, noise, weights):
        """Pack all hyperparameters into one flat vector.

        The positive hyperparameters (lengthscale, variance, noise) are
        mapped through inverse-softplus; weights are appended unchanged.
        """
        positives = inv_softplus(np.array([lengthscale, variance, noise]))
        return np.hstack([positives, weights])