# Exemplo n.º 1
def loocv2(KrigInfo, errtype="rmse"):
    """Leave-one-out cross validation for a trained Kriging model.

    For each sample, a copy of the model is refitted without that sample
    and used to predict it; the prediction errors are then aggregated.

    Args:
        KrigInfo (dict): Dictionary containing Kriging information.
        errtype (str, optional): Error metric passed to errperf.
            Defaults to "rmse".

    Returns:
        LOOCVerr: Aggregated LOOCV error according to errtype.
        LOOCVpred (np.ndarray): [nsamp, 1] array of LOO predictions.
    """
    nsamp = KrigInfo["nsamp"]
    F_full = KrigInfo["F"]
    y_full = KrigInfo["y"]
    LOOCVpred = np.zeros(shape=[len(y_full), 1])

    for idx in range(nsamp):
        # Work on a deep copy so the original model is never mutated.
        info_i = deepcopy(KrigInfo)
        x_test = KrigInfo['X_norm'][idx, :]
        info_i["F"] = np.delete(F_full, idx, 0)
        info_i['y'] = np.delete(KrigInfo['y'], idx, 0)
        info_i['X_norm'] = np.delete(KrigInfo['X_norm'], idx, 0)
        info_i['nsamp'] = nsamp - 1

        # Isotropic Gaussian kernel uses a single scalar theta.
        if info_i["kernel"] == ["iso_gaussian"]:
            theta_i = info_i['Theta'][0]
        else:
            theta_i = info_i['Theta']
        info_i = likelihood(theta_i, info_i, mode='all', trainvar=False)

        # Inputs are already normalised; skip re-standardization and
        # predict directly in the normalised space.
        info_i['standardization'] = False
        info_i['X'] = info_i['X_norm']
        LOOCVpred[idx, 0] = prediction(x_test,
                                       info_i,
                                       predtypes=['pred'],
                                       drm=None)

    LOOCVerr = errperf(y_full, LOOCVpred, errtype)

    return LOOCVerr, LOOCVpred
# Exemplo n.º 2
def tune_hyperparameters(KrigInfo,
                         xhyp_ii,
                         trainvar,
                         ubhyp=None,
                         lbhyp=None,
                         sigmacmaes=None,
                         scaling=None,
                         optimbound=None):
    """Estimate the best hyperparameters.

    Extracted hyperparameter tuning code into a function for
    parallelisation.

    Args:
        KrigInfo (dict): Dictionary that contains Kriging information.
        xhyp_ii (nparray): starting point number ii.
        trainvar (bool): Forwarded to the likelihood function as its
            trainvar argument.
        ubhyp (nparray): upper bounds of hyperparams.
        lbhyp (nparray): lower bounds of hyperparams.
        sigmacmaes (float): initial sigma for cma-es.
        scaling (list): scaling for cma-es.
        optimbound: bounds for optimizer.

    Returns:
        bestxcand (np.array(float)): Best x candidate array
        neglnlikecand (float): Negative ln-likelihood candidate

    Raises:
        ValueError: If a required parameter for the chosen optimizer is
            missing.
        KeyError: If KrigInfo["optimizer"] is not a recognised optimizer.
    """
    optimizer = KrigInfo["optimizer"]
    if optimizer == "cmaes":
        # Validate by NAME so the error identifies the missing argument.
        # (Interpolating the value itself would just print 'None'.)
        required = {'ubhyp': ubhyp,
                    'lbhyp': lbhyp,
                    'sigmacmaes': sigmacmaes,
                    'scaling': scaling}
        for name, value in required.items():
            if value is None:
                raise ValueError(f'{name} must be set if optimizer is cmaes.')
        bestxcand, es = cma.fmin2(
            likelihood,
            xhyp_ii,
            sigmacmaes, {
                'bounds': [lbhyp.tolist(), ubhyp.tolist()],
                'scaling_of_variables': scaling,
                'verb_disp': 0,
                'verbose': -9
            },
            args=(KrigInfo, 'default', trainvar))
        neglnlikecand = es.result[1]

    elif optimizer == "lbfgsb":
        if optimbound is None:
            raise ValueError('optimbound must be set if optimizer is lbfgsb.')
        res = minimize(likelihood,
                       xhyp_ii,
                       method='L-BFGS-B',
                       options={'eps': 1e-03},
                       bounds=optimbound,
                       args=(KrigInfo, 'default', trainvar))
        bestxcand = res.x
        neglnlikecand = res.fun

    elif optimizer == "slsqp":
        if optimbound is None:
            raise ValueError('optimbound must be set if optimizer is slsqp.')
        res = minimize(likelihood,
                       xhyp_ii,
                       method='SLSQP',
                       bounds=optimbound,
                       args=(KrigInfo, 'default', trainvar))
        bestxcand = res.x
        neglnlikecand = res.fun

    elif optimizer == "cobyla":
        if optimbound is None:
            raise ValueError('optimbound must be set if optimizer is cobyla.')
        res = fmin_cobyla(likelihood,
                          xhyp_ii,
                          optimbound,
                          rhobeg=0.5,
                          rhoend=1e-4,
                          args=(KrigInfo, 'default', trainvar))
        bestxcand = res
        # COBYLA only returns the solution point, so re-evaluate the
        # objective to obtain its negative ln-likelihood.
        neglnlikecand = likelihood(res, KrigInfo, trainvar=trainvar)

    else:
        # Key name corrected: the code reads KrigInfo['optimizer'].
        msg = (f"{KrigInfo['optimizer']} in KrigInfo['optimizer'] is not "
               f"recognised.")
        raise KeyError(msg)
    return bestxcand, neglnlikecand
# Exemplo n.º 3
    def simultpredehvi(self, disp=False):
        """
        Perform multi updates on EHVI MOBO using Kriging believer method.

        Args:
            disp (bool, optional): Print progress per update. Defaults
                to False.

        Returns:
             list: [xalltemp, yalltemp, salltemp, metricall] where
                 xalltemp (nparray) : Array of design variables updates.
                 yalltemp (nparray) : Array of objectives value updates.
                 salltemp (nparray) : Array of objective uncertainty
                     updates.
                 metricall (nparray) : Array of metric of the updates.
        """

        # Deep-copy every surrogate so believer updates never mutate the
        # real models held in self.kriglist.
        krigtemp = [0] * len(self.kriglist)
        for index, obj in enumerate(self.kriglist):
            krigtemp[index] = deepcopy(obj)
        yprednext = np.zeros(shape=[len(krigtemp)])
        sprednext = np.zeros(shape=[len(krigtemp)])
        ypartemp = self.ypar
        yall = self.yall

        for ii in range(self.multiupdate):
            t1 = time.time()
            if disp:
                print(f"update number {ii+1}")
            else:
                pass

            # Propose the next infill point by optimising the EHVI
            # acquisition over the current (believer-updated) models.
            xnext, metrictemp = run_multi_opt(krigtemp, self.moboInfo,
                                              ypartemp, self.krigconstlist,
                                              self.cheapconstlist)
            # Normalised design space bounds: [-1, 1] in every variable.
            bound = np.vstack(
                (-np.ones(shape=[1, krigtemp[0].KrigInfo["nvar"]]),
                 np.ones(shape=[1, krigtemp[0].KrigInfo["nvar"]])))

            # Kriging believer: append the model's own prediction at
            # xnext as if it were an observation, then refit each model.
            for jj in range(len(krigtemp)):
                yprednext[jj], sprednext[jj] = krigtemp[jj].predict(
                    xnext, ['pred', 's'])
                krigtemp[jj].KrigInfo['X'] = np.vstack(
                    (krigtemp[jj].KrigInfo['X'], xnext))
                krigtemp[jj].KrigInfo['y'] = np.vstack(
                    (krigtemp[jj].KrigInfo['y'], yprednext[jj]))
                krigtemp[jj].standardize()
                krigtemp[jj].KrigInfo["F"] = compute_regression_mat(
                    krigtemp[jj].KrigInfo["idx"],
                    krigtemp[jj].KrigInfo["X_norm"], bound,
                    np.ones(shape=[krigtemp[jj].KrigInfo["nvar"]]))
                # Refit with the existing Theta (no re-optimisation).
                krigtemp[jj].KrigInfo = likelihood(
                    krigtemp[jj].KrigInfo['Theta'],
                    krigtemp[jj].KrigInfo,
                    mode='all',
                    trainvar=krigtemp[jj].trainvar)

            # Accumulate this update's results (first iteration seeds the
            # arrays; later iterations stack below them).
            if ii == 0:
                xalltemp = deepcopy(xnext)
                yalltemp = deepcopy(yprednext)
                salltemp = deepcopy(sprednext)
                metricall = deepcopy(metrictemp)
            else:
                xalltemp = np.vstack((xalltemp, xnext))
                yalltemp = np.vstack((yalltemp, yprednext))
                salltemp = np.vstack((salltemp, sprednext))
                metricall = np.vstack((metricall, metrictemp))

            # Refresh the Pareto front with the believer predictions so
            # the next update targets the updated front.
            yall = np.vstack((yall, yprednext))
            ypartemp, _ = searchpareto.paretopoint(yall)

            if disp:
                print("time: ", time.time() - t1, " s")

        return [xalltemp, yalltemp, salltemp, metricall]
# Exemplo n.º 4
    def train(self, n_cpu=1, disp=True, pre_theta=None, pool=None):
        """
        Train Kriging model by optimising its hyperparameters.

        Args:
            n_cpu (int, optional): If > 1, uses parallel processing.
                Defaults to 1.
            disp (bool, optional): Print updates. Defaults to True.
            pre_theta (np.ndarray, optional): Pre-trained hyperparameters
                used to seed the restart points.
                #TODO: document this fully, if working. Defaults to None.
            pool (multiprocessing.Pool, optional): A multiprocessing.Pool
                instance. Will be passed to functions for use, if
                specified. Defaults to None.
        Returns:
            None
        """
        if disp:
            print("Begin train hyperparam.")

        # Isotropic gaussian kernel
        # A single length-scale is shared by all dimensions, so only one
        # hyperparameter is tuned.
        if self.KrigInfo["kernel"] == ["iso_gaussian"
                                       ]:  # TODO: Should this be in a list?
            self.nbhyp = 1
        elif len(self.KrigInfo["kernel"]
                 ) != 1 and "iso_gaussian" in self.KrigInfo["kernel"]:
            raise NotImplementedError(
                "Isotropic Gaussian kernel is not available for composite kernel"
            )
        else:
            # Keep nbhyp consistent with the bound arrays.
            if len(self.KrigInfo["ubhyp"]) != self.nbhyp:
                self.nbhyp = len(self.KrigInfo["ubhyp"])

        # Create multiple starting points
        if self.KrigInfo['nrestart'] < 1:
            xhyp = self.nbhyp * [0]
        else:
            # Sobol sampling of restart points within the hyperparameter
            # bounds; 'sobolnew' handles higher dimensions (> 40).
            if self.nbhyp <= 40:
                _, xhyp = sampling('sobol',
                                   len(self.KrigInfo["ubhyp"]),
                                   self.KrigInfo['nrestart'],
                                   result="real",
                                   upbound=self.KrigInfo["ubhyp"],
                                   lobound=self.KrigInfo["lbhyp"])
            else:
                _, xhyp = sampling('sobolnew',
                                   len(self.KrigInfo["ubhyp"]),
                                   self.KrigInfo['nrestart'],
                                   result="real",
                                   upbound=self.KrigInfo["ubhyp"],
                                   lobound=self.KrigInfo["lbhyp"])

        # multiple starting from pre-trained theta:
        # pre_theta itself plus nrestart-1 random perturbations of it.
        if pre_theta is not None:
            xhyp = np.random.rand(self.KrigInfo['nrestart'] - 1,
                                  len(self.KrigInfo["ubhyp"])) + pre_theta
            xhyp = np.vstack((pre_theta, xhyp))

        # Optimize hyperparam if number of hyperparameter is 1 using golden section method
        if self.nbhyp == 1:
            if self.KrigInfo["optimizer"] != "ga":
                res = minimize_scalar(likelihood,
                                      bounds=(self.lb, self.ub),
                                      method='golden',
                                      args=(self.KrigInfo, 'default',
                                            self.trainvar))
                # iso_gaussian expects a scalar theta; otherwise wrap the
                # scalar result into a 1-element array.
                if self.KrigInfo["kernel"] == ["iso_gaussian"]:
                    best_x = res.x
                else:
                    best_x = np.array([res.x])
                neglnlikecand = likelihood(best_x,
                                           self.KrigInfo,
                                           trainvar=self.trainvar)
            else:
                best_x, neglnlikecand, _ = uncGA(likelihood,
                                                 lb=self.lb,
                                                 ub=self.ub,
                                                 npop=100,
                                                 maxg=100,
                                                 args=(self.KrigInfo,
                                                       'default',
                                                       self.trainvar))
            if disp:
                print(f"Best hyperparameter is {best_x}")
                print(f"With NegLnLikelihood of {neglnlikecand}")
        else:
            # Set Bounds and Constraints for Optimizer
            # Set Bounds for LBSGSB or SLSQP if one is used.
            if self.KrigInfo["optimizer"] == "lbfgsb" or self.KrigInfo[
                    "optimizer"] == "slsqp":
                optimbound = np.transpose(
                    np.vstack(
                        (self.KrigInfo["lbhyp"], self.KrigInfo["ubhyp"])))
            # Set Constraints for Cobyla if used
            elif self.KrigInfo["optimizer"] == "cobyla":
                optimbound = []
                for i in range(len(self.KrigInfo["ubhyp"])):
                    # params aa and bb are not used, just to avoid error in Cobyla optimizer
                    # itemp=i binds the loop index at definition time,
                    # avoiding the late-binding closure pitfall.
                    optimbound.append(lambda x, Kriginfo, aa, bb, itemp=i: x[
                        itemp] - self.KrigInfo["lbhyp"][itemp])
                    optimbound.append(lambda x, Kriginfo, aa, bb, itemp=i: self
                                      .KrigInfo["ubhyp"][itemp] - x[itemp])
            else:
                optimbound = None

            if disp:
                print(
                    f"Training {self.KrigInfo['nrestart']} hyperparameter(s)")

            # Train hyperparams
            bestxcand, neglnlikecand = self.parallelopt(xhyp,
                                                        n_cpu,
                                                        optimbound,
                                                        disp=disp,
                                                        pool=pool)

            # Search best hyperparams among the candidates
            I = np.argmin(neglnlikecand)
            best_x = bestxcand[I, :]

            if disp:
                print("Single Objective, train hyperparam, end.")
                print(f"Best hyperparameter is {best_x}")
                print(f"With NegLnLikelihood of {neglnlikecand[I]}")

            # Calculate Kriging model based on the best hyperparam.

        # Final fit: rebuild the full model with the best hyperparameters.
        self.KrigInfo = likelihood(best_x,
                                   self.KrigInfo,
                                   mode='all',
                                   trainvar=self.trainvar)
# Exemplo n.º 5
    def simultpredehvi(self, disp=False, pool=None):
        """
        Perform multi updates on EHVI MOBO using Kriging believer method.

        Args:
            disp (bool, optional): Print progress. Defaults to False.
            pool (multiprocessing.Pool, optional): Pool instance passed
                through to the optimisation routines. Defaults to None.

        Returns:
            xalltemp (np.ndarray): [n_kb, n_dv] array of update design
                variable values.
            yalltemp (np.ndarray): [n_kb, n_obj] array of update
                objective values.
            salltemp (np.ndarray): [n_kb, n_obj] array of update
                objective uncertainty values.
            metricall (np.ndarray): [n_kb, 1] array of update metric
                values.
        """
        num_models = len(self.kriglist)
        num_dv = self.kriglist[0].KrigInfo["nvar"]

        # Believer updates operate on deep copies so the originals in
        # self.kriglist remain untouched.
        models = [deepcopy(krig) for krig in self.kriglist]
        y_next = np.zeros([num_models])
        s_next = np.zeros([num_models])

        # Preallocate the per-update result arrays.
        x_updates = np.empty([self.multiupdate, num_dv])
        y_updates = np.empty([self.multiupdate, num_models])
        s_updates = np.empty([self.multiupdate, num_models])
        metric_updates = np.empty([self.multiupdate, 1])

        pareto_front = self.ypar
        y_observed = self.yall

        for ii in range(self.multiupdate):
            t1 = time.time()
            if disp:
                print(f"update number {ii+1}")

            # Optimise the acquisition over the current believer models.
            x_next, metric_next = run_multi_opt(
                models,
                self.moboInfo,
                pareto_front,
                krigconstlist=self.krigconstlist,
                cheapconstlist=self.cheapconstlist,
                pool=pool)

            # Normalised design bounds: [-1, 1] in every variable.
            bound = np.vstack((-np.ones([1, num_dv]), np.ones([1, num_dv])))

            # Kriging believer: feed each model its own prediction at
            # x_next as a pseudo-observation, then refit it.
            for j in range(num_models):
                model = models[j]
                y_next[j], s_next[j] = model.predict(x_next, ['pred', 's'])
                model.KrigInfo['X'] = np.vstack(
                    (model.KrigInfo['X'], x_next))
                model.KrigInfo['y'] = np.vstack(
                    (model.KrigInfo['y'], y_next[j]))
                model.standardize()
                model.KrigInfo["F"] = compute_regression_mat(
                    model.KrigInfo["idx"], model.KrigInfo["X_norm"], bound,
                    np.ones([num_dv]))
                model.KrigInfo = likelihood(model.KrigInfo['Theta'],
                                            model.KrigInfo,
                                            mode='all',
                                            trainvar=model.trainvar)

            x_updates[ii, :] = x_next[:]
            y_updates[ii, :] = y_next[:]
            s_updates[ii, :] = s_next[:]
            metric_updates[ii, :] = metric_next

            # Refresh the Pareto front with the believer predictions.
            y_observed = np.vstack((y_observed, y_next))
            pareto_front, _ = searchpareto.paretopoint(y_observed)

            if disp:
                print(f"time: {time.time() - t1:.2f} s")

        return x_updates, y_updates, s_updates, metric_updates