Example #1
    def train_gp(self):
        """Optimizes the hyperparameters of the current GP model."""

        self.gp.train(logger_name=self.output.basename+'hyps')
        hyps, labels = Parameters.get_hyps(
                self.gp.hyps_mask, self.gp.hyps, constraint=False,
                label=True)
        if labels is None:
            labels = self.gp.hyp_labels
        self.output.write_hyps(labels, hyps,
                               self.start_time,
                               self.gp.likelihood, self.gp.likelihood_gradient,
                               hyps_mask=self.gp.hyps_mask)
Example #2
    def train_gp(self, max_iter: int = None):
        """
        Train the Gaussian process and write the results to the output file.

        :param max_iter: Maximum iterations associated with this training run,
            overriding the Gaussian Process's internally set maxiter.
        :type max_iter: int
        """
        logger = logging.getLogger(self.logger_name)

        if self.gp_is_mapped:
            logger.debug("Training skipped because of MGP")
            return

        logger.debug("Train GP")

        logger_train = self.output.basename + "hyps"

        # TODO: Improve flexibility in GP training to make this next step
        # unnecessary, so maxiter can be passed as an argument

        # Don't train if maxiter == 0
        if max_iter == 0:
            self.gp.check_L_alpha()
        elif max_iter is not None:
            temp_maxiter = self.gp.maxiter
            self.gp.maxiter = max_iter
            self.gp.train(logger_name=logger_train)
            self.gp.maxiter = temp_maxiter
        else:
            self.gp.train(logger_name=logger_train)

        hyps, labels = Parameters.get_hyps(self.gp.hyps_mask,
                                           self.gp.hyps,
                                           constraint=False,
                                           label=True)
        if labels is None:
            labels = self.gp.hyp_labels
        self.output.write_hyps(
            labels,
            hyps,
            self.start_time,
            self.gp.likelihood,
            self.gp.likelihood_gradient,
            hyps_mask=self.gp.hyps_mask,
        )
        self.train_count += 1
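The second version saves, overrides, and restores gp.maxiter by hand. A minimal sketch (not from the source) of the same pattern made exception-safe with try/finally; FakeGP is a hypothetical stand-in for the real GP object:

class FakeGP:
    def __init__(self):
        self.maxiter = 10

    def train(self, logger_name=None):
        print(f"training with maxiter={self.maxiter}")

def train_with_maxiter(gp, max_iter, logger_name=None):
    saved = gp.maxiter
    gp.maxiter = max_iter
    try:
        gp.train(logger_name=logger_name)
    finally:
        gp.maxiter = saved  # restored even if train() raises

gp = FakeGP()
train_with_maxiter(gp, 3)
assert gp.maxiter == 10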
Example #3
def test_hyps_grad(kernels, diff_cutoff, constraint):

    delta = 1e-8
    d1 = 1
    d2 = 2
    tol = 1e-4

    np.random.seed(10)
    cutoffs, hyps, hm = generate_diff_hm(kernels,
                                         diff_cutoff,
                                         constraint=constraint)
    args = from_mask_to_args(hyps, cutoffs, hm)
    kernel, kernel_grad, _, _, _, _, _ = str_to_kernel_set(kernels, "mc", hm)

    np.random.seed(0)
    env1 = generate_mb_envs(cutoffs, np.eye(3) * 100, delta, d1)
    env2 = generate_mb_envs(cutoffs, np.eye(3) * 100, delta, d2)
    env1 = env1[0][0]
    env2 = env2[0][0]

    k, grad = kernel_grad(env1, env2, d1, d2, *args)

    original = kernel(env1, env2, d1, d2, *args)

    nhyps = len(hyps)
    if hm['train_noise']:
        nhyps -= 1
    original_hyps = Parameters.get_hyps(hm, hyps=hyps)

    for i in range(nhyps):
        newhyps = np.copy(hyps)
        newhyps[i] += delta
        if 'map' in hm:
            newid = hm['map'][i]
            hm['original_hyps'] = np.copy(original_hyps)
            hm['original_hyps'][newid] += delta
        newargs = from_mask_to_args(newhyps, cutoffs, hm)

        hgrad = (kernel(env1, env2, d1, d2, *newargs) - original) / delta
        if 'map' in hm:
            print(i, "hgrad", hgrad, grad[hm['map'][i]])
            assert (isclose(grad[hm['map'][i]], hgrad, rtol=tol))
        else:
            print(i, "hgrad", hgrad, grad[i])
            assert (isclose(grad[i], hgrad, rtol=tol))
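The test checks the analytic gradient returned by kernel_grad against a forward finite difference, (k(h + delta * e_i) - k(h)) / delta. A self-contained sketch of the same check on a toy function (all names here are illustrative, not from the source):

import numpy as np

def finite_difference_grad(f, hyps, delta=1e-8):
    # Forward difference: df/dh_i ~ (f(h + delta*e_i) - f(h)) / delta
    base = f(hyps)
    grad = np.zeros_like(hyps, dtype=float)
    for i in range(len(hyps)):
        shifted = np.copy(hyps)
        shifted[i] += delta
        grad[i] = (f(shifted) - base) / delta
    return grad

# f(h) = h0**2 * h1 has analytic gradient (2*h0*h1, h0**2) = (6.0, 2.25)
# at h = (1.5, 2.0); the finite difference should agree to ~1e-6.
print(finite_difference_grad(lambda h: h[0] ** 2 * h[1], np.array([1.5, 2.0])))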
Example #4
    def hyps_and_labels(self):
        return Parameters.get_hyps(
            self.hyps_mask, self.hyps, constraint=False, label=True
        )

    def from_dict(hyps_mask, verbose=False, init_spec=[]):
        """convert dictionary mask to HM instance
        This function is not tested yet
        """

        Parameters.check_instantiation(
            hyps_mask["hyps"], hyps_mask["cutoffs"], hyps_mask["kernels"], hyps_mask
        )

        pm = ParameterHelper(verbose=verbose)

        nspecie = hyps_mask["nspecie"]
        if nspecie > 1:
            max_species = np.max(hyps_mask["specie_mask"])
            specie_mask = hyps_mask["specie_mask"]
            for i in range(max_species + 1):
                elelist = np.where(specie_mask == i)[0]
                if len(elelist) > 0:
                    for ele in elelist:
                        if ele != 0:
                            elename = Z_to_element(ele)
                            if len(init_spec) > 0:
                                if elename in init_spec:
                                    pm.define_group("specie", i, [elename])
                            else:
                                pm.define_group("specie", i, [elename])
        else:
            pm.define_group("specie", i, ["*"])

        for kernel in hyps_mask["kernels"] + ParameterHelper.cutoff_types_keys:
            n = hyps_mask.get("n" + kernel, 0)
            if n >= 0:
                if kernel not in ParameterHelper.cutoff_types:
                    chyps, copt = Parameters.get_component_hyps(
                        hyps_mask, kernel, constraint=True, noise=False
                    )
                    sig = chyps[0]
                    ls = chyps[1]
                    csig = copt[0]
                    cls = copt[1]
                    cutoff = hyps_mask["cutoffs"][kernel]
                    pm.set_parameters("cutoff_" + kernel, cutoff)
                    cutoff_list = hyps_mask.get(
                        f"{kernel}_cutoff_list", np.ones(len(sig)) * cutoff
                    )
                elif kernel in ParameterHelper.cutoff_types and n > 1:
                    cutoff_list = hyps_mask[
                        ParameterHelper.cutoff_types[kernel] + "_cutoff_list"
                    ]

                if n > 1:
                    all_specie = np.arange(nspecie)
                    all_comb = combinations_with_replacement(
                        all_specie, ParameterHelper.ndim[kernel]
                    )
                    for comb in all_comb:
                        mask_id = 0
                        for ele in comb:
                            mask_id += ele
                            mask_id *= nspecie
                        mask_id = mask_id // nspecie
                        ttype = hyps_mask[f"{kernel}_mask"][mask_id]
                        pm.define_group(f"{kernel}", f"{kernel}{ttype}", comb)

                        if (kernel not in ParameterHelper.cutoff_types) and (
                            kernel not in ParameterHelper.cutoff_types_values
                        ):
                            pm.set_parameters(
                                f"{kernel}{ttype}",
                                [sig[ttype], ls[ttype], cutoff_list[ttype]],
                                opt=[csig[ttype], cls[ttype]],
                            )
                        elif kernel in ParameterHelper.cutoff_types_values:
                            pm.set_parameters(
                                f"{kernel}{ttype}",
                                [sig[ttype], ls[ttype]],
                                opt=[csig[ttype], cls[ttype]],
                            )
                        else:
                            pm.set_parameters(f"{kernel}{ttype}", cutoff_list[ttype])
                else:
                    pm.define_group(
                        kernel, kernel, ["*"] * ParameterHelper.ndim[kernel]
                    )
                    if kernel not in ParameterHelper.cutoff_types_keys:
                        pm.set_parameters(
                            kernel, parameters=np.hstack([sig, ls, cutoff]), opt=copt
                        )
                    else:
                        pm.set_parameters(kernel, parameters=cutoff)

        hyps = Parameters.get_hyps(hyps_mask)
        pm.set_parameters("noise", hyps[-1])

        if "cutoffs" in hyps_mask:
            cutoffs = hyps_mask["cutoffs"]
            for k in cutoffs:
                pm.set_parameters(f"cutoff_{k}", cutoffs[k])

        return pm
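In the n > 1 branch, each tuple of specie indices comb is packed into a single integer mask_id in base nspecie before the kernel mask is indexed. A standalone illustration of that encoding (the helper name is hypothetical):

def comb_to_mask_id(comb, nspecie):
    # Pack a tuple of specie indices into one base-nspecie integer,
    # mirroring the inner loop of from_dict above.
    mask_id = 0
    for ele in comb:
        mask_id += ele
        mask_id *= nspecie
    return mask_id // nspecie

# With nspecie = 3, the pair (1, 2) encodes to 1*3 + 2 = 5.
print(comb_to_mask_id((1, 2), 3))  # 5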
Example #6
def rbcm_get_neg_like(
    hyps,
    n_experts,
    name,
    force_kernel,
    logger_name,
    cutoffs,
    hyps_mask,
    n_cpus,
    n_sample,
    per_expert_parallel,
):

    neg_like = 0
    neg_like_grad = None

    logger = logging.getLogger(logger_name)
    time0 = time.time()
    if per_expert_parallel and n_cpus > 1:

        with mp.Pool(processes=n_cpus) as pool:

            results = []
            for i in range(n_experts):
                results.append(
                    pool.apply_async(
                        get_neg_like,
                        (
                            hyps,
                            f"{name}_{i}",
                            force_kernel,
                            logger_name,
                            cutoffs,
                            hyps_mask,
                            1,
                            n_sample,
                        ),
                    ))
            for i in range(n_experts):
                chunk = results[i].get()
                neg_like_ = chunk
                neg_like += neg_like_
            pool.close()
            pool.join()
    else:
        for i in range(n_experts):
            neg_like_ = get_neg_like(
                hyps,
                f"{name}_{i}",
                force_kernel,
                logger_name,
                cutoffs,
                hyps_mask,
                n_cpus,
                n_sample,
            )
            neg_like += neg_like_

    logger.info("")
    logger.info(f"Hyperparameters: {list(hyps)}")
    logger.info(f"Total Likelihood: {-neg_like}")
    logger.info(f"One step {time.time()-time0}")

    ohyps, label = Parameters.get_hyps(hyps_mask,
                                       hyps,
                                       constraint=False,
                                       label=True)
    if label:
        logger.info(f"oHyp_array: {list(ohyps)}")
        for i, l in enumerate(label):
            logger.info(f"oHyp {l}: {ohyps[i]}")

    return neg_like
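When per_expert_parallel is set, each expert's negative log likelihood is computed in its own worker process and the partial results are summed. A minimal runnable sketch of that dispatch-and-accumulate pattern; expert_neg_like is a placeholder for the real get_neg_like:

import multiprocessing as mp

def expert_neg_like(i):
    # Placeholder for the real per-expert likelihood computation.
    return float(i)

def total_neg_like(n_experts, n_cpus):
    with mp.Pool(processes=n_cpus) as pool:
        results = [pool.apply_async(expert_neg_like, (i,))
                   for i in range(n_experts)]
        return sum(r.get() for r in results)

if __name__ == "__main__":
    print(total_neg_like(4, 2))  # 0.0 + 1.0 + 2.0 + 3.0 = 6.0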