Example #1
    def set_L_alpha(self):
        """
        Invert the covariance matrix, setting L (a lower triangular
        matrix s.t. L L^T = (K + sig_n^2 I)) and alpha, the inverse
        covariance matrix multiplied by the vector of training labels.
        The forces and variances are later obtained using alpha.
        """

        _global_training_data[self.name] = self.training_data
        _global_training_labels[self.name] = self.training_labels_np

        ky_mat = get_ky_mat(self.hyps,
                            self.name,
                            self.kernel,
                            cutoffs=self.cutoffs,
                            hyps_mask=self.hyps_mask,
                            n_cpus=self.n_cpus,
                            n_sample=self.n_sample)

        l_mat = np.linalg.cholesky(ky_mat)
        l_mat_inv = np.linalg.inv(l_mat)
        ky_mat_inv = l_mat_inv.T @ l_mat_inv
        alpha = np.matmul(ky_mat_inv, self.training_labels_np)

        self.ky_mat = ky_mat
        self.l_mat = l_mat
        self.alpha = alpha
        self.ky_mat_inv = ky_mat_inv

        self.likelihood = get_like_from_mats(ky_mat, l_mat,
                                             alpha, self.name)
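
The linear algebra inside set_L_alpha is standard Gaussian-process bookkeeping. As a minimal standalone sketch (using only NumPy, with a toy RBF kernel matrix, noise level sig_n, and label vector y standing in for what get_ky_mat and training_labels_np provide), the L and alpha quantities described in the docstring can be formed like this:

import numpy as np

# Toy stand-ins: a small positive-definite kernel matrix, noise level, and labels.
rng = np.random.default_rng(0)
X = rng.normal(size=(5, 3))
K = np.exp(-0.5 * np.sum((X[:, None] - X[None, :]) ** 2, axis=-1))  # RBF kernel
sig_n = 0.1
y = rng.normal(size=5)

ky_mat = K + sig_n ** 2 * np.eye(len(y))   # K + sig_n^2 I
l_mat = np.linalg.cholesky(ky_mat)         # lower triangular, L L^T = ky_mat
l_mat_inv = np.linalg.inv(l_mat)
ky_mat_inv = l_mat_inv.T @ l_mat_inv       # (K + sig_n^2 I)^{-1}
alpha = ky_mat_inv @ y                     # later used for mean predictions

# Sanity check: alpha solves (K + sig_n^2 I) alpha = y.
assert np.allclose(ky_mat @ alpha, y)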
Example #2
    def set_L_alpha(self):
        """
        Invert the covariance matrix, setting L (a lower triangular
        matrix s.t. L L^T = (K + sig_n^2 I)) and alpha, the inverse
        covariance matrix multiplied by the vector of training labels.
        The forces and variances are later obtained using alpha.
        """

        self.sync_data()

        ky_mat = get_Ky_mat(
            self.hyps,
            self.name,
            self.kernel,
            self.energy_kernel,
            self.energy_force_kernel,
            self.energy_noise,
            cutoffs=self.cutoffs,
            hyps_mask=self.hyps_mask,
            n_cpus=self.n_cpus,
            n_sample=self.n_sample,
        )

        l_mat = np.linalg.cholesky(ky_mat)
        l_mat_inv = np.linalg.inv(l_mat)
        ky_mat_inv = l_mat_inv.T @ l_mat_inv
        alpha = np.matmul(ky_mat_inv, self.all_labels)

        self.ky_mat = ky_mat
        self.l_mat = l_mat
        self.alpha = alpha
        self.ky_mat_inv = ky_mat_inv

        self.likelihood = get_like_from_mats(ky_mat, l_mat, alpha, self.name)
        self.n_envs_prev = len(self.training_data)
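
The helper get_like_from_mats is not shown in these examples. Assuming it returns the standard GP log marginal likelihood, a standalone version built from the same ky_mat Cholesky factor, alpha, and labels could be sketched as follows (the name gp_log_likelihood is illustrative, not part of the library):

import numpy as np

def gp_log_likelihood(l_mat, alpha, labels):
    """Standard GP log marginal likelihood from the Cholesky factor L,
    alpha = (K + sig_n^2 I)^{-1} y, and the label vector y:
        log p(y) = -1/2 y^T alpha - sum(log diag(L)) - n/2 log(2 pi)
    """
    n = len(labels)
    data_fit = -0.5 * labels @ alpha
    complexity = -np.sum(np.log(np.diag(l_mat)))
    norm_const = -0.5 * n * np.log(2 * np.pi)
    return data_fit + complexity + norm_const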
Example #3
    def set_L_alpha_part(self, expert_id):
        """
        Invert the covariance matrix, setting L (a lower triangular
        matrix s.t. L L^T = (K + sig_n^2 I)) and alpha, the inverse
        covariance matrix multiplied by the vector of training labels.
        The forces and variances are later obtained using alpha.
        """

        self.sync_experts_data(expert_id)

        if self.per_expert_parallel and self.n_cpus > 1:
            n_cpus = 1
        else:
            n_cpus = self.n_cpus

        ky_mat = get_Ky_mat(
            self.hyps,
            f"{self.name}_{expert_id}",
            self.kernel,
            self.energy_kernel,
            self.energy_force_kernel,
            self.energy_noise,
            cutoffs=self.cutoffs,
            hyps_mask=self.hyps_mask,
            n_cpus=n_cpus,
            n_sample=self.n_sample,
        )

        self.compute_experts_matrices(ky_mat, expert_id)

        self.likelihood[expert_id] = get_like_from_mats(
            self.ky_mat[expert_id],
            self.l_mat[expert_id],
            self.alpha[expert_id],
            f"{self.name}_{expert_id}",
        )
Example #4
    def set_L_alpha(self):
        """
        Set the lower-triangular Cholesky factor L of the covariance matrix and
        the alpha vector (which maps the similarity vector of a new training
        point to its output label) for each expert.
        :return:
        """

        self.sync_data()

        logger = logging.getLogger(self.logger_name)
        logger.debug("set_L_alpha")

        if self.per_expert_parallel and self.n_cpus > 1:

            time0 = time.time()
            with mp.Pool(processes=self.n_cpus) as pool:
                results = []
                for expert_id in range(self.n_experts):
                    results.append(
                        pool.apply_async(
                            get_Ky_mat,
                            (
                                self.hyps,
                                f"{self.name}_{expert_id}",
                                self.kernel,
                                self.energy_kernel,
                                self.energy_force_kernel,
                                self.energy_noise,
                                self.cutoffs,
                                self.hyps_mask,
                                1,
                                self.n_sample,
                            ),
                        ))
                for i in range(self.n_experts):
                    ky_mat = results[i].get()
                    self.compute_experts_matrices(ky_mat, i)
                pool.close()
                pool.join()
            logger.debug(
                f"set_L_alpha with per_expert_par {time.time()-time0}")
        else:

            for expert_id in range(self.n_experts):

                logger.debug(f"compute L_alpha for {expert_id}")
                time0 = time.time()

                ky_mat = get_Ky_mat(
                    self.hyps,
                    f"{self.name}_{expert_id}",
                    self.kernel,
                    self.energy_kernel,
                    self.energy_force_kernel,
                    self.energy_noise,
                    self.cutoffs,
                    self.hyps_mask,
                    self.n_cpus,
                    self.n_sample,
                )

                self.compute_experts_matrices(ky_mat, expert_id)
                logger.debug(f"{expert_id} compute_L_alpha {time.time()-time0}"
                             f" {len(self.training_data[expert_id])}")

        for expert_id in range(self.n_experts):
            self.likelihood[expert_id] = get_like_from_mats(
                self.ky_mat[expert_id],
                self.l_mat[expert_id],
                self.alpha[expert_id],
                f"{self.name}_{expert_id}",
            )

        self.total_likelihood = np.sum(self.likelihood)
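
All four examples invert the Cholesky factor explicitly with np.linalg.inv. An equivalent and usually more numerically stable way to obtain alpha and the inverse covariance is to use triangular solves. The sketch below is not what the snippets above do, just an alternative formulation of the same step using SciPy's cho_factor/cho_solve:

import numpy as np
from scipy.linalg import cho_factor, cho_solve

def l_alpha_via_cho_solve(ky_mat, labels):
    """Compute L, alpha, and (K + sig_n^2 I)^{-1} from a Cholesky
    factorization without forming L^{-1} explicitly."""
    c, low = cho_factor(ky_mat, lower=True)
    alpha = cho_solve((c, low), labels)                    # solves ky_mat @ alpha = y
    ky_mat_inv = cho_solve((c, low), np.eye(len(labels)))  # full inverse, if needed
    l_mat = np.tril(c)                                     # lower-triangular factor
    return l_mat, alpha, ky_mat_inv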