Example #1
import numpy as np
try:
    from scipydirect import minimize as directminimize
except ImportError:  # pragma: no cover
    directminimize = None
try:
    import cma
except ImportError:  # pragma: no cover
    cma = None
import sobol_seq

from bayeso import acquisition
from bayeso import constants
from bayeso.gp import gp
from bayeso.gp import gp_common
from bayeso.utils import utils_common
from bayeso.utils import utils_covariance
from bayeso.utils import utils_logger

logger = utils_logger.get_logger('bo')


def get_grids(arr_ranges, int_grids):
    """
    It returns grids of given `arr_ranges`, where each dimension has `int_grids` partitions.

    :param arr_ranges: ranges. Shape: (d, 2).
    :type arr_ranges: numpy.ndarray
    :param int_grids: the number of partitions per dimension.
    :type int_grids: int.

    :returns: grids of given `arr_ranges`. Shape: (`int_grids`:math:`^{\\text{d}}`, d).
    :rtype: numpy.ndarray

    :raises: AssertionError

    """
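For illustration, a minimal NumPy sketch of the grid construction this docstring describes; `make_grids` is a hypothetical stand-in, not bayeso's `get_grids` implementation:

import numpy as np

def make_grids(arr_ranges, int_grids):
    # One linspace per dimension, then the Cartesian product via meshgrid.
    list_axes = [np.linspace(lower, upper, int_grids) for lower, upper in arr_ranges]
    list_mesh = np.meshgrid(*list_axes)
    # Flatten each mesh and stack into shape (int_grids**d, d).
    return np.vstack([mesh.ravel() for mesh in list_mesh]).T

grids = make_grids(np.array([[0.0, 1.0], [-5.0, 5.0]]), 10)
print(grids.shape)  # (100, 2)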
Example #2
# gp_common
# author: Jungtaek Kim ([email protected])
# last updated: August 07, 2020

import numpy as np
import scipy.linalg

from bayeso import covariance
from bayeso import constants
from bayeso.utils import utils_gp
from bayeso.utils import utils_logger

logger = utils_logger.get_logger('gp_common')


def get_kernel_inverse(X_train,
                       hyps,
                       str_cov,
                       is_fixed_noise=constants.IS_FIXED_GP_NOISE,
                       is_gradient=False,
                       debug=False):
    """
    This function computes a kernel inverse without any matrix decomposition techniques.

    :param X_train: inputs. Shape: (n, d) or (n, m, d).
    :type X_train: numpy.ndarray
    :param hyps: dictionary of hyperparameters for Gaussian process.
    :type hyps: dict.
    :param str_cov: the name of covariance function.
    :type str_cov: str.
    :param is_fixed_noise: flag for fixing a noise.
    :type is_fixed_noise: bool., optional

    """
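To make the behavior concrete, here is a hedged sketch of a direct kernel inverse with a squared-exponential kernel; the helper names are illustrative, not bayeso's API:

import numpy as np

def se_kernel(X, lengthscale=1.0, signal=1.0):
    # Squared-exponential (RBF) Gram matrix.
    sq_dists = np.sum((X[:, None, :] - X[None, :, :])**2, axis=-1)
    return signal**2 * np.exp(-0.5 * sq_dists / lengthscale**2)

X_train = np.random.randn(5, 2)
noise = 1e-2
cov_X_X = se_kernel(X_train) + noise**2 * np.eye(X_train.shape[0])
# Direct inverse, without Cholesky or other decompositions.
inv_cov_X_X = np.linalg.inv(cov_X_X)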
Example #3
import os
import numpy as np
try:
    import matplotlib.pyplot as plt
except ImportError:  # pragma: no cover
    plt = None
try:
    import pylab
except ImportError:  # pragma: no cover
    pylab = None

from bayeso.utils import utils_common
from bayeso.utils import utils_logger
from bayeso import constants

logger = utils_logger.get_logger('utils_plotting')


@utils_common.validate_types
def _set_font_config(use_tex: bool) -> constants.TYPE_NONE: # pragma: no cover
    """
    It sets a font configuration.

    :param use_tex: flag for using LaTeX.
    :type use_tex: bool.

    :returns: None.
    :rtype: NoneType

    """
Example #4
"""It defines the functions related to kernels for
Gaussian process regression."""

import time
import numpy as np
import scipy.optimize

from bayeso import covariance
from bayeso import constants
from bayeso.gp import gp_likelihood
from bayeso.utils import utils_gp
from bayeso.utils import utils_covariance
from bayeso.utils import utils_common
from bayeso.utils import utils_logger

logger = utils_logger.get_logger('gp_kernel')


@utils_common.validate_types
def get_optimized_kernel(
        X_train: np.ndarray,
        Y_train: np.ndarray,
        prior_mu: constants.TYPING_UNION_CALLABLE_NONE,
        str_cov: str,
        str_optimizer_method: str = constants.STR_OPTIMIZER_METHOD_GP,
        str_modelselection_method: str = constants.STR_MODELSELECTION_METHOD,
        use_ard: bool = constants.USE_ARD,
        fix_noise: bool = constants.FIX_GP_NOISE,
        debug: bool = False) -> constants.TYPING_TUPLE_TWO_ARRAYS_DICT:
    """
    This function computes the kernel matrix optimized by an optimization method.

    """
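As a hedged sketch of such marginal-likelihood-based optimization, the snippet below fits squared-exponential hyperparameters for a 1-D GP with scipy.optimize; it is illustrative only, not bayeso's implementation:

import numpy as np
import scipy.optimize

def neg_log_marginal(log_hyps, X, Y):
    # Unpack log-scale hyperparameters: signal, lengthscale, noise.
    signal, lengthscale, noise = np.exp(log_hyps)
    K = signal**2 * np.exp(-0.5 * (X[:, None] - X[None, :])**2 / lengthscale**2)
    K += noise**2 * np.eye(X.shape[0])
    L = np.linalg.cholesky(K)
    alpha = np.linalg.solve(L.T, np.linalg.solve(L, Y))
    return (0.5 * Y @ alpha + np.sum(np.log(np.diag(L)))
            + 0.5 * X.shape[0] * np.log(2.0 * np.pi))

X = np.linspace(0.0, 1.0, 20)
Y = np.sin(4.0 * X) + 0.1 * np.random.randn(20)
result = scipy.optimize.minimize(neg_log_marginal, x0=np.zeros(3),
                                 args=(X, Y), method='L-BFGS-B')
signal, lengthscale, noise = np.exp(result.x)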
Example #5
try:
    from scipydirect import minimize as directminimize
except ImportError:  # pragma: no cover
    directminimize = None
try:
    import cma
except ImportError:  # pragma: no cover
    cma = None

import numpy as np

from bayeso import acquisition
from bayeso import constants
from bayeso.utils import utils_covariance
from bayeso.utils import utils_common
from bayeso.utils import utils_logger

logger = utils_logger.get_logger('utils_bo')


@utils_common.validate_types
def normalize_min_max(Y: np.ndarray) -> np.ndarray:
    """
    It normalizes `Y` by min-max normalization.

    :param Y: responses. Shape: (n, 1).
    :type Y: numpy.ndarray

    :returns: normalized responses. Shape: (n, 1).
    :rtype: numpy.ndarray

    :raises: AssertionError

    """
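This hedged, standalone sketch mirrors the min-max normalization the docstring describes (illustrative, not bayeso's code):

import numpy as np

def normalize_min_max(Y):
    # Scale responses into [0, 1]; assumes Y is not constant.
    return (Y - np.min(Y)) / (np.max(Y) - np.min(Y))

Y = np.array([[3.0], [1.0], [2.0]])
print(normalize_min_max(Y))  # [[1.0], [0.0], [0.5]]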
Example #6
# gp_gpytorch
# author: Jungtaek Kim ([email protected])
# last updated: August 20, 2020

import time
import numpy as np
import torch
import gpytorch

from bayeso import constants
from bayeso.gp import gp_common
from bayeso.utils import utils_gp
from bayeso.utils import utils_logger

logger = utils_logger.get_logger('gp_gpytorch')


class ExactGPModel(gpytorch.models.ExactGP):
    def __init__(self, str_cov, prior_mu, X_train, Y_train, likelihood):
        super(ExactGPModel, self).__init__(X_train, Y_train, likelihood)

        self.dim_X = X_train.shape[1]

        if prior_mu is None:
            self.mean_module = gpytorch.means.ConstantMean()
        else:
            raise NotImplementedError()

        if str_cov in ('eq', 'se'):
            self.covar_module = gpytorch.kernels.ScaleKernel(
                gpytorch.kernels.RBFKernel(ard_num_dims=self.dim_X))
        elif str_cov == 'matern32':
            # Matern 3/2 corresponds to nu=1.5 in gpytorch.
            self.covar_module = gpytorch.kernels.ScaleKernel(
                gpytorch.kernels.MaternKernel(nu=1.5, ard_num_dims=self.dim_X))
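A hedged usage sketch of the class above, assuming its forward method (not shown in this excerpt) returns a gpytorch.distributions.MultivariateNormal as gpytorch's ExactGP API requires; the tensors are placeholders:

import torch
import gpytorch

X_train = torch.randn(10, 2)
Y_train = torch.randn(10)

likelihood = gpytorch.likelihoods.GaussianLikelihood()
model = ExactGPModel('se', None, X_train, Y_train, likelihood)

model.eval()
likelihood.eval()
with torch.no_grad():
    # Posterior predictive distribution at the training inputs.
    pred = likelihood(model(X_train))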
Example #7
# gp
# author: Jungtaek Kim ([email protected])
# last updated: August 21, 2020

import time
import numpy as np
import scipy.stats

from bayeso import covariance
from bayeso import constants
from bayeso.gp import gp_common
from bayeso.gp import gp_scipy
from bayeso.utils import utils_gp
from bayeso.utils import utils_logger

logger = utils_logger.get_logger('gp')


def sample_functions(mu, Sigma, num_samples=1):
    """
    It samples `num_samples` functions from a multivariate Gaussian distribution (mu, Sigma).

    :param mu: mean vector. Shape: (n, ).
    :type mu: numpy.ndarray
    :param Sigma: covariance matrix. Shape: (n, n).
    :type Sigma: numpy.ndarray
    :param num_samples: the number of sampled functions.
    :type num_samples: int., optional

    :returns: sampled functions. Shape: (num_samples, n).
    :rtype: numpy.ndarray

    """
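For reference, a hedged standalone sketch of this sampling via a Cholesky factor (jitter added for numerical stability; not bayeso's exact code):

import numpy as np

def sample_gaussian(mu, Sigma, num_samples=1):
    # Draw mu + L @ z with z ~ N(0, I), where L L^T = Sigma.
    L = np.linalg.cholesky(Sigma + 1e-8 * np.eye(Sigma.shape[0]))
    zs = np.random.randn(num_samples, mu.shape[0])
    return mu[None, :] + zs @ L.T

samples = sample_gaussian(np.zeros(5), np.eye(5), num_samples=3)
print(samples.shape)  # (3, 5)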
Example #8
# tp
"""It defines Student-:math:`t` process regression."""

import time
import numpy as np
import scipy.stats

from bayeso import covariance
from bayeso import constants
from bayeso.tp import tp_kernel
from bayeso.utils import utils_gp
from bayeso.utils import utils_covariance
from bayeso.utils import utils_common
from bayeso.utils import utils_logger

logger = utils_logger.get_logger('tp')


@utils_common.validate_types
def sample_functions(nu: float,
                     mu: np.ndarray,
                     Sigma: np.ndarray,
                     num_samples: int = 1) -> np.ndarray:
    """
    It samples `num_samples` functions from a multivariate Student-:math:`t` distribution (nu, mu, Sigma).

    :param nu: degrees of freedom.
    :type nu: float.
    :param mu: mean vector. Shape: (n, ).
    :type mu: numpy.ndarray
    :param Sigma: covariance matrix. Shape: (n, n).
    :type Sigma: numpy.ndarray
    :param num_samples: the number of sampled functions.
    :type num_samples: int., optional

    """
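A hedged sketch of the standard construction: a multivariate Student-t draw is a Gaussian draw scaled by sqrt(nu / w) with w ~ chi-squared(nu) (standalone, not bayeso's exact code):

import numpy as np

def sample_student_t(nu, mu, Sigma, num_samples=1):
    L = np.linalg.cholesky(Sigma)
    zs = np.random.randn(num_samples, mu.shape[0]) @ L.T
    # Scale each Gaussian sample by an independent chi-squared factor.
    chi2 = np.random.chisquare(nu, size=(num_samples, 1))
    return mu[None, :] + zs * np.sqrt(nu / chi2)

samples = sample_student_t(5.0, np.zeros(4), np.eye(4), num_samples=2)
print(samples.shape)  # (2, 4)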
Example #9
# last updated: August 10, 2020

import os
import time
import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp

from bayeso import covariance
from bayeso import constants
from bayeso.gp import gp_common
from bayeso.utils import utils_gp
from bayeso.utils import utils_covariance
from bayeso.utils import utils_logger

logger = utils_logger.get_logger('gp_tensorflow')
gpus = tf.config.experimental.list_physical_devices('GPU')
if gpus:  # pragma: no cover
    try:
        for gpu in gpus:
            tf.config.experimental.set_memory_growth(gpu, True)
    except RuntimeError as e:
        print(e)


def get_optimized_kernel(X_train,
                         Y_train,
                         prior_mu,
                         str_cov,
                         is_fixed_noise=constants.IS_FIXED_GP_NOISE,
                         num_iters=1000,
                         debug=False):
    """
    This function computes the kernel matrix optimized by gradient descent using TensorFlow.
    """
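A minimal, hedged sketch of the gradient-based hyperparameter fitting such a function typically performs in TensorFlow, here optimizing a single log-lengthscale; it is illustrative, not bayeso's implementation:

import numpy as np
import tensorflow as tf

X = tf.constant(np.linspace(0.0, 1.0, 20)[:, None])
Y = tf.constant(np.sin(4.0 * np.linspace(0.0, 1.0, 20)))

log_lengthscale = tf.Variable(0.0, dtype=tf.float64)
optimizer = tf.keras.optimizers.Adam(learning_rate=0.1)

for _ in range(200):
    with tf.GradientTape() as tape:
        sq_dists = tf.reduce_sum((X[:, None, :] - X[None, :, :])**2, axis=-1)
        K = tf.exp(-0.5 * sq_dists / tf.exp(2.0 * log_lengthscale))
        K += 1e-2 * tf.eye(20, dtype=tf.float64)
        L = tf.linalg.cholesky(K)
        alpha = tf.linalg.cholesky_solve(L, Y[:, None])
        # Negative log marginal likelihood up to an additive constant.
        loss = (0.5 * tf.reduce_sum(Y[:, None] * alpha)
                + tf.reduce_sum(tf.math.log(tf.linalg.diag_part(L))))
    grads = tape.gradient(loss, [log_lengthscale])
    optimizer.apply_gradients(zip(grads, [log_lengthscale]))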
Example #10
# gp_scipy
# author: Jungtaek Kim ([email protected])
# last updated: August 07, 2020

import time
import numpy as np
import scipy.linalg
import scipy.optimize

from bayeso import constants
from bayeso.gp import gp_common
from bayeso.utils import utils_gp
from bayeso.utils import utils_covariance
from bayeso.utils import utils_logger

logger = utils_logger.get_logger('gp_scipy')


def neg_log_ml(X_train,
               Y_train,
               hyps,
               str_cov,
               prior_mu_train,
               is_fixed_noise=constants.IS_FIXED_GP_NOISE,
               is_cholesky=True,
               is_gradient=True,
               debug=False):
    """
    This function computes a negative log marginal likelihood.

    :param X_train: inputs. Shape: (n, d) or (n, m, d).
    :type X_train: numpy.ndarray

    """
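To pin down the quantity, here is a hedged standalone check of the negative log marginal likelihood via a Cholesky factorization, cross-validated against scipy's multivariate normal log-density (illustrative, not bayeso's code):

import numpy as np
import scipy.linalg
import scipy.stats

# For zero prior mean: 0.5 y^T K^{-1} y + 0.5 log det K + 0.5 n log(2 pi).
n = 8
X = np.random.randn(n, 1)
K = np.exp(-0.5 * (X - X.T)**2) + 1e-2 * np.eye(n)
Y = np.random.randn(n)

L = scipy.linalg.cholesky(K, lower=True)
alpha = scipy.linalg.cho_solve((L, True), Y)
neg_log_ml_value = (0.5 * Y @ alpha + np.sum(np.log(np.diag(L)))
                    + 0.5 * n * np.log(2.0 * np.pi))

# The same value from the Gaussian log-density, negated.
assert np.isclose(neg_log_ml_value,
                  -scipy.stats.multivariate_normal.logpdf(Y, np.zeros(n), K))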