Example #1
    def __init__(self,
                 X,
                 Y,
                 kernel=None,
                 Y_metadata=None,
                 changepoint=0,
                 changepointDim=0):

        Ny = Y.shape[0]

        if Y_metadata is None:
            # Mark each input as side 0 (before the changepoint) or side 1
            # (after), based on its value along the changepoint dimension.
            Y_metadata = {
                'side': np.array([
                    0 if x < changepoint else 1 for x in X[:, changepointDim]
                ])[:, None]
            }
        else:
            assert Y_metadata['output_index'].shape[0] == Ny

        if kernel is None:
            kernel = kern.RBF(X.shape[1])

        # Likelihood: one Gaussian noise term for each side of the changepoint.
        side1Noise = likelihoods.Gaussian(name="Gaussian_noise_side1")
        side2Noise = likelihoods.Gaussian(name="Gaussian_noise_side2")
        likelihood = MixedNoise_twoSide(side1Noise, side2Noise)

        super(GPHeteroscedasticRegression_twoSided,
              self).__init__(X, Y, kernel, likelihood, Y_metadata=Y_metadata)
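The class above keys the two Gaussian noise terms off the `side` metadata. A minimal usage sketch, assuming `GPHeteroscedasticRegression_twoSided` (and its `MixedNoise_twoSide` likelihood) can be imported from the surrounding project; the data and the changepoint location are made up for illustration:

import numpy as np
import GPy

# Toy 1-D data whose noise level jumps at x = 0.5.
X = np.linspace(0, 1, 50)[:, None]
Y = np.sin(6 * X) + np.where(X < 0.5, 0.05, 0.3) * np.random.randn(50, 1)

# One Gaussian noise variance is learned per side of the changepoint.
m = GPHeteroscedasticRegression_twoSided(X, Y,
                                         kernel=GPy.kern.RBF(1),
                                         changepoint=0.5,
                                         changepointDim=0)
m.optimize()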
Example #2
File: layers.py Project: jaimeide/PyDeepGP
    def __init__(self, layer_lower, dim_up, X=None, X_variance=None, Z=None,
                 num_inducing=10, kernel=None, inference_method=None,
                 noise_var=1e-2, init='rand', mpi_comm=None, mpi_root=0,
                 back_constraint=True, encoder=None, auto_update=True,
                 name='hiddenlayer'):
        self.dim_up, self.dim_down = dim_up, layer_lower.X.shape[1]
        likelihood = likelihoods.Gaussian(variance=noise_var)
        self.variationalterm = NormalEntropy()

        super(HiddenLayer, self).__init__(layer_lower, self.dim_down, dim_up, likelihood,
                                          init=init, X=X, X_variance=X_variance, Z=Z,
                                          num_inducing=num_inducing, kernel=kernel,
                                          inference_method=inference_method,
                                          mpi_comm=mpi_comm, mpi_root=mpi_root,
                                          back_constraint=back_constraint, encoder=encoder,
                                          auto_update=auto_update, name=name)
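The hidden layer's noise term is a stock GPy Gaussian likelihood with a small initial variance. A short standalone sketch of that object (standard GPy API):

import GPy

# A Gaussian likelihood with a small initial variance, as used by the hidden layer.
lik = GPy.likelihoods.Gaussian(variance=1e-2)
print(lik.variance)  # the noise variance is an optimizable parameter
lik.variance.fix()   # it can also be frozen before optimization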
Example #3
    def __init__(self,
                 X,
                 Y,
                 kernel=None,
                 Y_metadata=None,
                 normalizer=None,
                 noise_var=1.,
                 mean_function=None,
                 A=None):

        if kernel is None:
            kernel = kern.RBF(X.shape[1])

        likelihood = likelihoods.Gaussian(variance=noise_var)

        super(GPRegression_Group, self).__init__(X,
                                                 Y,
                                                 kernel,
                                                 likelihood,
                                                 name='GP regression group',
                                                 Y_metadata=Y_metadata,
                                                 normalizer=normalizer,
                                                 mean_function=mean_function)
        # Swap in the group variant of exact Gaussian inference and keep the
        # group matrix A.
        self.inference_method = ExactGaussianInferenceGroup()
        self.A = A
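`GPRegression_Group` mirrors GPy's stock `GPRegression` constructor before swapping in the group inference method. For comparison, a minimal sketch of the plain-GPy equivalent on toy data (standard API):

import numpy as np
import GPy

X = np.random.rand(100, 1)
Y = np.sin(6 * X) + 0.1 * np.random.randn(100, 1)

# GPRegression builds the Gaussian likelihood internally from noise_var.
m = GPy.models.GPRegression(X, Y, GPy.kern.RBF(X.shape[1]), noise_var=1.)
m.optimize()
mean, var = m.predict(X)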
Example #4
File: layers.py Project: jaimeide/PyDeepGP
    def __init__(self, dim_down, dim_up, Y, X=None, X_variance=None, Z=None,
                 num_inducing=10, kernel=None, inference_method=None,
                 likelihood=None, init='rand', mpi_comm=None, mpi_root=0,
                 back_constraint=True, encoder=None, auto_update=True,
                 repeatX=False, repeatXsplit=0, name='obslayer'):
        self.dim_up, self.dim_down = dim_up, dim_down
        self._Y = Y
        self.repeatX = repeatX
        self.repeatXsplit = repeatXsplit
        if likelihood is None:
            likelihood = likelihoods.Gaussian()
        self._toplayer_ = False
        self.variationalterm = NormalEntropy()
        super(ObservedLayer, self).__init__(None, self.dim_down, dim_up, likelihood,
                                            init=init, X=X, X_variance=X_variance, Z=Z,
                                            num_inducing=num_inducing, kernel=kernel,
                                            inference_method=inference_method,
                                            mpi_comm=mpi_comm, mpi_root=mpi_root,
                                            back_constraint=back_constraint,
                                            encoder=encoder, auto_update=auto_update,
                                            name=name)
Example #5
    def __init__(self,
                 X,
                 Y,
                 kern,
                 mu_old,
                 Su_old,
                 Kaa_old,
                 Z_old,
                 Z,
                 likelihood=None,
                 mean_function=None):
        """
        X is a data matrix, size N x D
        Y is a data matrix, size N x R
        Z is a matrix of pseudo inputs, size M x D
        kern, mean_function are appropriate gpflow objects
        mu_old, Su_old are mean and covariance of old q(u)
        Z_old is the old inducing inputs
        This method only works with a Gaussian likelihood.
        """

        # Avoid a shared mutable default: build a fresh Gaussian likelihood
        # per instance.
        if likelihood is None:
            likelihood = likelihoods.Gaussian()

        self.X = Param('input', X)
        self.Y = Param('output', Y)

        GP.__init__(self,
                    X,
                    Y,
                    kern,
                    likelihood,
                    mean_function,
                    inference_method=None)

        self.Z = Param('inducing inputs', Z)
        self.link_parameter(self.Z)
        self.mean_function = mean_function
        self.num_data = X.shape[0]
        self.num_latent = Y.shape[1]

        self.mu_old = mu_old
        self.M_old = Z_old.shape[0]
        self.Su_old = Su_old
        self.Kaa_old = Kaa_old
        self.Z_old = Z_old
        self.ARD = True
        self.grad_fun = grad(self.objective)
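A side note on the `likelihood=None` default used here: a default like `likelihood=likelihoods.Gaussian()` would construct one object when the `def` line is evaluated and share it across every instance that relies on the default. A minimal sketch of that pitfall (generic Python; the classes are hypothetical):

import GPy

class SharedDefault:
    # Anti-pattern: the Gaussian() is created once, at definition time.
    def __init__(self, likelihood=GPy.likelihoods.Gaussian()):
        self.likelihood = likelihood

a, b = SharedDefault(), SharedDefault()
assert a.likelihood is b.likelihood  # same object: tuning one model mutates the other

class FreshDefault:
    # Preferred: build a fresh likelihood per instance.
    def __init__(self, likelihood=None):
        self.likelihood = likelihood if likelihood is not None else GPy.likelihoods.Gaussian()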
Example #6
    def __init__(self, X, Y, kernel, warping_function_name=None, warping_indices=None,
                 warping_hid_dims=None, warping_out_dim=None, zdim=None,
                 X_indices=None, X_warped_indices=None, normalizer=False,
                 Xmin=None, Xmax=None, epsilon=None):
        if X.ndim == 1:
            X = X.reshape(-1, 1)
        self.Xdim = X.shape[1]
        self.X_untransformed = X.copy()
        self.kernel = kernel

        self.warping_function_name = warping_function_name
        self.warping_indices = warping_indices
        self.warping_hid_dims = warping_hid_dims
        self.warping_out_dim = warping_out_dim
        self.warping_functions = dict()
        self.X_indices = X_indices
        self.X_warped_indices = X_warped_indices
        self.update_warping_functions = True  # set as False if random embedding

        if warping_function_name is None:
            raise NotImplementedError("a warping_function_name must be given")
        else:
            for k, v in warping_indices.items():
                if len(v) > 0:
                    self.warping_functions[k] = NNwarpingFunction(
                        v, self.warping_hid_dims[k], self.warping_out_dim[k],
                        self.X_warped_indices[k], k)
                else:
                    self.warping_functions[k] = None

        self.X_warped_th = self.transform_data(self.X_untransformed)
        if self.warping_function_name == 'nn':
            X_np = []
            for k, v in self.X_warped_th.items():
                if isinstance(v, torch.Tensor):
                    X_np.append(v.detach().numpy())
                else:
                    X_np.append(v)
            self.X_warped = np.concatenate(X_np, axis=1)
        likelihood = likelihoods.Gaussian()
        super(FidelitywarpingModel, self).__init__(self.X_warped,
                                                   Y,
                                                   likelihood=likelihood,
                                                   kernel=kernel,
                                                   normalizer=normalizer,
                                                   name='FidelitywarpingModel')

        # Add the parameters in the warping function to the model parameters hierarchy
        for k, v in self.warping_functions.items():
            if v is not None:
                self.link_parameter(v)
Example #7
def gp_regression(inference_method: Optional[str] = None,
                  likelihood_hyperprior: Optional[PriorMap] = None) -> dict:
    """Build model dict of GP regression."""
    model_dict = dict()

    # Set likelihood.
    likelihood = likelihoods.Gaussian()
    if likelihood_hyperprior is not None:
        # set likelihood hyperpriors
        likelihood = set_priors(likelihood, likelihood_hyperprior)
    model_dict['likelihood'] = likelihood

    # Set inference method.
    if inference_method == 'laplace':
        inference_method = Laplace()
    model_dict['inference_method'] = inference_method

    return model_dict
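A sketch of how the returned pieces might be assembled into a model. `set_priors` and `PriorMap` are the project's own helpers, so this sketch attaches a hyperprior with GPy's stock `set_prior` instead; the data are toy values:

import numpy as np
import GPy

X = np.random.rand(50, 1)
Y = np.sin(6 * X) + 0.1 * np.random.randn(50, 1)

parts = gp_regression(inference_method='laplace')
parts['likelihood'].variance.set_prior(GPy.priors.Gamma.from_EV(0.1, 0.1))

m = GPy.core.GP(X, Y,
                kernel=GPy.kern.RBF(1),
                likelihood=parts['likelihood'],
                inference_method=parts['inference_method'])
m.optimize()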
Example #8
    def __init__(self, Y, dim_down, dim_up, likelihood, MLP_dims=None, X=None,
                 X_variance=None, init='rand', Z=None, num_inducing=10, kernel=None,
                 inference_method=None, uncertain_inputs=True, mpi_comm=None,
                 mpi_root=0, back_constraint=True, name='mrd-view'):

        self.uncertain_inputs = uncertain_inputs
        self.layer_lower = None
        self.scale = 1.

        if back_constraint:
            from .mlp import MLP
            from copy import deepcopy
            self.encoder = MLP([dim_down, int((dim_down+dim_up)*2./3.), int((dim_down+dim_up)/3.), dim_up] if MLP_dims is None else [dim_down]+deepcopy(MLP_dims)+[dim_up])
            X = self.encoder.predict(Y.mean.values if isinstance(Y, VariationalPosterior) else Y)
            X_variance = 0.0001*np.ones(X.shape)
            self.back_constraint = True
        else:
            self.back_constraint = False

        if Z is None:
            Z = np.random.rand(num_inducing, dim_up)*2-1. #np.random.permutation(X.copy())[:num_inducing]
        assert Z.shape[1] == X.shape[1]
        
        if likelihood is None: likelihood = likelihoods.Gaussian(variance=Y.var()*0.01)
        
        if uncertain_inputs: X = NormalPosterior(X, X_variance)
        if kernel is None: kernel = kern.RBF(dim_up, ARD = True)
        
        # The command below will also give the field self.X to the view.
        super(MRDView, self).__init__(X, Y, Z, kernel, likelihood, inference_method=inference_method, mpi_comm=mpi_comm, mpi_root=mpi_root, name=name)
        if back_constraint: self.link_parameter(self.encoder)

        if self.uncertain_inputs and self.back_constraint:
            from GPy import Param
            from GPy.core.parameterization.transformations import Logexp
            self.X_var_common = Param('X_var',X_variance[0].copy(),Logexp())
            self.link_parameters(self.X_var_common)
        # self.Xv duplicates self.X: self.X is used for the likelihood and all
        # other calculations, while self.Xv is only used for its gradient. The
        # redundancy is kept in case we want a product-of-experts MRD model.
        self.Xv = self.X
Example #9
    def __init__(self,
                 X,
                 Y,
                 Z,
                 kernels,
                 name='gp_msgp',
                 interpolation_method=None,
                 grid_dims=None,
                 normalize=False):
        super(GPMSGP, self).__init__(name)

        self.X = ObsAr(X)  # ObsAr: GPy's observable-array wrapper

        if grid_dims is None:
            dims = [None] * len(Z)
            max_dim_ii = 0
            for ii in range(len(Z)):
                dims[ii] = np.arange(max_dim_ii,
                                     max_dim_ii + np.shape(Z[ii])[1])
                max_dim_ii = dims[ii][-1] + 1  # advance past this block's last dimension
            grid_dims = dims
        else:
            grid_dims_to_create_id = []
            grid_dims_create = []
            grid_create_args = []
            n_grid_dims = len(grid_dims)
            for ii in range(n_grid_dims):
                if isinstance(Z[ii], dict):
                    grid_dims_to_create_id.append(ii)
                    grid_dims_create.append(grid_dims[ii])
                    grid_create_args.append(Z[ii])

            if len(grid_dims_to_create_id) > 0:  # create all requested grids, even a single one
                Z_create = self.create_grid(grid_create_args,
                                            grid_dims=grid_dims_create)

                for ii in range(len(grid_dims_to_create_id)):
                    Z[grid_dims_to_create_id[ii]] = Z_create[ii]

        self.Z = Z
        self.input_grid_dims = grid_dims
        """
        if isinstance(Z,dict): #automatically create the grid
            Z,self.input_grid_dims = self.create_grid(Z,grid_dims = grid_dims)
            self.Z = Z
        else:
            
            self.input_grid_dims = grid_dims
            self.Z = Z
        """

        if normalize:
            with_mean = True
            with_std = True
        else:
            with_mean = False
            with_std = False

        self.normalizer = StandardScaler(with_mean=with_mean,
                                         with_std=with_std)
        self.X = self.normalizer.fit_transform(self.X)
        # Fit one normalizer per grid block and transform each grid.
        self.Z_normalizers = [
            StandardScaler(with_mean=with_mean, with_std=with_std).fit(X_z)
            for X_z in self.Z
        ]
        self.Z = [
            self.Z_normalizers[ii].transform(self.Z[ii])
            for ii in range(len(self.Z))
        ]

        self.num_data, self.input_dim = self.X.shape

        assert Y.ndim == 2

        self.Y = ObsAr(Y)

        self.Y_metadata = None  #TO-DO: do we even need this?

        assert np.shape(Y)[0] == self.num_data

        _, self.output_dim = self.Y.shape

        # Accept either a single kernel or an iterable of kernels, checking
        # that every entry is a GPy Kern instance.
        try:
            for kernel in kernels:
                assert isinstance(kernel, Kern)
        except TypeError:
            # Not iterable: kernels must be a single kernel.
            assert isinstance(kernels, Kern)
            kernels = [kernels]

        self.inference_method = GridGaussianInference()

        self.likelihood = likelihoods.Gaussian()  #TO-DO: do we even need this?

        self.kern = KernGrid(kernels,
                             self.likelihood,
                             self.input_grid_dims,
                             interpolation_method=interpolation_method)

        self.mean_function = Constant(self.input_dim, self.output_dim)
        self.kern.update_Z(Z)
        ##for test set n_neighbors = 4
        self.kern.init_interpolation_method(n_neighbors=8)
        self.kern.update_X_Y(X, Y)

        ## register the parameters for optimization (paramz)
        self.link_parameter(self.kern)
        self.link_parameter(self.likelihood)

        ## need to do this in the case that someone wants to do prediction without/before
        ## hyperparameter optimization
        self.parameters_changed()
        self.posterior_prediction = self.inference_method.update_prediction_vectors(
            self.kern, self.posterior, self.grad_dict, self.likelihood)
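Going by the signature above, a hedged usage sketch with a toy two-dimensional grid; `GPMSGP` and its grid kernel machinery come from the surrounding project, so this is illustrative only:

import numpy as np
import GPy

# One grid block per input dimension (Kronecker-style structure).
Z = [np.linspace(0, 1, 20)[:, None], np.linspace(0, 1, 20)[:, None]]
kernels = [GPy.kern.RBF(1), GPy.kern.RBF(1)]

X = np.random.rand(200, 2)
Y = np.sin(6 * X[:, :1]) * np.cos(4 * X[:, 1:]) + 0.05 * np.random.randn(200, 1)

m = GPMSGP(X, Y, Z, kernels, normalize=True)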
Example #10
    def __init__(self,
                 layer_upper,
                 Xs,
                 X_win=0,
                 Us=None,
                 U_win=1,
                 Z=None,
                 num_inducing=10,
                 kernel=None,
                 inference_method=None,
                 likelihood=None,
                 noise_var=1.,
                 inducing_init='kmeans',
                 back_cstr=False,
                 MLP_dims=None,
                 name='layer'):

        self.layer_upper = layer_upper
        self.nSeq = len(Xs)

        self.X_win = X_win  # if X_win==0, it is not autoregressive.
        self.X_dim = Xs[0].shape[1]
        self.Xs_flat = Xs
        self.X_observed = not isinstance(Xs[0], VariationalPosterior)

        self.withControl = Us is not None
        self.U_win = U_win
        self.U_dim = Us[0].shape[1] if self.withControl else None
        self.Us_flat = Us
        if self.withControl:
            assert len(Xs) == len(Us), \
                "The number of signals should equal the number of controls!"

        if not self.X_observed and back_cstr:
            self._init_encoder(MLP_dims)
            self.back_cstr = True
        else:
            self.back_cstr = False
        self._init_XY()

        if Z is None:
            if not back_cstr and inducing_init == 'kmeans':
                from sklearn.cluster import KMeans
                m = KMeans(n_clusters=num_inducing, n_init=1000, max_iter=100)
                m.fit(self.X.mean.values.copy())
                Z = m.cluster_centers_.copy()
            else:
                Z = np.random.randn(num_inducing, self.X.shape[1])
        assert Z.shape[1] == self.X.shape[1]

        if kernel is None: kernel = kern.RBF(self.X.shape[1], ARD=True)

        if inference_method is None: inference_method = VarDTC()
        if likelihood is None:
            likelihood = likelihoods.Gaussian(variance=noise_var)
        self.normalPrior, self.normalEntropy = NormalPrior(), NormalEntropy()
        super(Layer, self).__init__(self.X,
                                    self.Y,
                                    Z,
                                    kernel,
                                    likelihood,
                                    inference_method=inference_method,
                                    name=name)
        if not self.X_observed:
            if back_cstr:
                assert self.X_win > 0
                self.link_parameters(*(self.init_Xs + self.Xs_var +
                                       [self.encoder]))
            else:
                self.link_parameters(*self.Xs_flat)
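The k-means initialization of the inducing inputs above is a common standalone pattern; a minimal sketch with toy data (standard scikit-learn API, smaller n_init than the snippet's 1000):

import numpy as np
from sklearn.cluster import KMeans

X = np.random.randn(500, 3)
num_inducing = 10

# Use the cluster centroids as the initial inducing inputs Z.
km = KMeans(n_clusters=num_inducing, n_init=10).fit(X)
Z = km.cluster_centers_.copy()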