# Example #1
    def __init__(self, train_x, train_y, num_mixtures=10):
        """Build a GP over a spectral mixture kernel seeded from the training data.

        Args:
            train_x: training inputs.
            train_y: training targets.
            num_mixtures: number of spectral mixture components.
        """
        sm_kernel = SpectralMixtureKernel(num_mixtures)
        sm_kernel.initialize_from_data(train_x, train_y)

        super(SMKernelGP, self).__init__(sm_kernel, train_x, train_y)
        # Keep a handle on the kernel and use a constant mean.
        self.cov = sm_kernel
        self.mean = gp.means.ConstantMean()
# Example #2
 def __init__(self, train_x, train_y, num_mixtures=10):
     """Build a GP over an additive (spectral mixture + polynomial + RBF) kernel.

     Args:
         train_x: training inputs.
         train_y: training targets.
         num_mixtures: number of spectral mixture components.
     """
     spectral = SpectralMixtureKernel(num_mixtures)
     spectral.initialize_from_data(train_x, train_y)
     # Sum of three base kernels; only the SM component needs data-based init.
     components = [spectral, PolynomialKernel(2), RBFKernel()]
     kernel = AdditiveKernel(*components)
     super(CompositeKernelGP, self).__init__(kernel, train_x, train_y)
     self.smk = spectral
     self.mean = gp.means.ConstantMean()
class SpectralMixtureGPModel(gpytorch.models.ExactGP):
    """Exact GP with a 4-component spectral mixture kernel.

    NOTE(review): uses the legacy gpytorch API (`n_mixtures` keyword,
    `GaussianRandomVariable` return) — presumably pinned to an old release.
    """

    def __init__(self, train_x, train_y, likelihood):
        super(SpectralMixtureGPModel, self).__init__(train_x, train_y, likelihood)
        smk = SpectralMixtureKernel(n_mixtures=4)
        smk.initialize_from_data(train_x, train_y)
        self.mean_module = ConstantMean(prior=SmoothedBoxPrior(-1, 1))
        self.covar_module = smk

    def forward(self, x):
        """Return the GP prior at inputs x."""
        prior_mean = self.mean_module(x)
        prior_cov = self.covar_module(x)
        return GaussianRandomVariable(prior_mean, prior_cov)
# Example #4
class SpectralMixtureGPModel(gpytorch.models.ExactGP):
    """Exact GP with a 4-mixture spectral mixture kernel (1-D ARD).

    Args:
        train_x: training inputs.
        train_y: training targets.
        likelihood: gpytorch likelihood.
        empspect: if True, initialize kernel hyperparameters from the
            empirical spectrum of the data; otherwise use the standard
            statistics-based initialization.
    """

    def __init__(self, train_x, train_y, likelihood, empspect=False):
        super(SpectralMixtureGPModel, self).__init__(train_x, train_y, likelihood)
        self.mean_module = ConstantMean(prior=SmoothedBoxPrior(-1, 1))
        self.covar_module = SpectralMixtureKernel(num_mixtures=4, ard_num_dims=1)
        # BUG FIX: the two branches were swapped — empspect=True must select
        # the empirical-spectrum initializer, matching the flag's meaning.
        if empspect:
            self.covar_module.initialize_from_data_empspect(train_x, train_y)
        else:
            self.covar_module.initialize_from_data(train_x, train_y)

    def forward(self, x):
        """Return the GP prior MultivariateNormal at inputs x."""
        mean_x = self.mean_module(x)
        covar_x = self.covar_module(x)
        return MultivariateNormal(mean_x, covar_x)
# Example #5
class ExactGPModel(gpytorch.models.ExactGP):
    """Exact GP regression model with a configurable kernel and latent feature map."""

    def __init__(self,
                 train_x,
                 train_y,
                 likelihood,
                 var=None,
                 latent=None,
                 kernel_params=None,
                 latent_params=None):
        """Set up mean, latent function, and covariance modules.

        Args:
            train_x, train_y: training data.
            likelihood: gpytorch likelihood.
            var: if given, a fixed white-noise variance added to the kernel.
            latent: latent function choice ('identity', 'linear', 'non_linear').
            kernel_params: dict with 'type' ('rbf', 'matern', 'spectral_mixture')
                and, for spectral mixture, 'n_mixtures'.
            latent_params: dict with 'input_dim' (and optionally 'embed_dim').

        Raises:
            NotImplementedError: for an unrecognized kernel or latent choice.
        """
        super(ExactGPModel, self).__init__(train_x, train_y, likelihood)
        if latent_params is None:
            latent_params = {'input_dim': train_x.size(-1)}
        self._set_latent_function(latent, latent_params)

        self.mean_module = ZeroMean()

        # ARD dimensionality follows the latent embedding when one exists,
        # otherwise the raw input dimensionality.
        embed_dim = self.latent_func.embed_dim
        ard_num_dims = train_x.size(-1) if embed_dim is None else embed_dim

        kernel = 'rbf' if kernel_params is None else kernel_params['type']
        if kernel in (None, 'rbf'):
            self.kernel_covar_module = ScaleKernel(
                RBFKernel(ard_num_dims=ard_num_dims))
        elif kernel == 'matern':
            # without scale kernel: very poor performance
            # matern 0.5, 1.5 and 2.5 all have similar performance
            self.kernel_covar_module = ScaleKernel(
                MaternKernel(nu=1.5, ard_num_dims=ard_num_dims))
        elif kernel == 'spectral_mixture':
            # NOTE: spectral mixture ARD is over the raw input dims, not the
            # latent embedding, mirroring how it is initialized from data.
            self.kernel_covar_module = SpectralMixtureKernel(
                num_mixtures=kernel_params['n_mixtures'],
                ard_num_dims=train_x.size(-1))
            self.kernel_covar_module.initialize_from_data(train_x, train_y)
        else:
            raise NotImplementedError

        # Optionally add a fixed-variance white-noise component.
        if var is None:
            self.covar_module = self.kernel_covar_module
        else:
            self.noise_covar_module = WhiteNoiseKernel(var)
            self.covar_module = self.kernel_covar_module + self.noise_covar_module

    def _set_latent_function(self, latent, latent_params):
        """Attach the latent feature map selected by `latent` (default: identity)."""
        if latent is None or latent == 'identity':
            self.latent_func = IdentityLatentFunction()
        elif latent == 'linear':
            # embed_dim defaults to 6 when the caller did not choose one.
            latent_params.setdefault('embed_dim', 6)
            self.latent_func = LinearLatentFunction(latent_params['input_dim'],
                                                    latent_params['embed_dim'])
        elif latent == 'non_linear':
            latent_params.setdefault('embed_dim', 6)
            self.latent_func = NonLinearLatentFunction(
                latent_params['input_dim'], latent_params['embed_dim'],
                latent_params['embed_dim'])
        else:
            raise NotImplementedError

    def forward(self, inp):
        """Map inputs through the latent function and return the GP prior."""
        features = self.latent_func(inp)
        return MultivariateNormal(self.mean_module(features),
                                  self.covar_module(features))