def __init__(self, input_dim, energy=np.asarray([1.]), frequency=np.asarray([2*np.pi]),
             variance=1., lengthscales=1., len_fixed=False):
    gpflow.kernels.Stationary.__init__(self, input_dim, variance=variance,
                                       lengthscales=lengthscales, active_dims=None, ARD=False)
    # One energy/frequency parameter per partial, wrapped in ParamLists so
    # each can be constrained and fixed/freed independently.
    self.num_partials = len(frequency)
    energy_list = []
    frequency_list = []
    for i in range(self.num_partials):
        energy_list.append(Param(energy[i], transforms.positive))
        frequency_list.append(Param(frequency[i], transforms.positive))
    self.energy = ParamList(energy_list)
    self.frequency = ParamList(frequency_list)
    if len_fixed:
        self.lengthscales.fixed = True
def __init__(self, input_dim, num_partials, lengthscales=None, variances=None, frequencies=None):
    gpflow.kernels.Kern.__init__(self, input_dim, active_dims=None)
    self.ARD = False
    self.num_partials = num_partials
    # Default initialisation: unit lengthscales, small variances, and
    # harmonically spaced frequencies 1, 2, ..., num_partials.
    # (The original `lengthscales.all() == None` check crashes on the None
    # default; an `is None` test is what was intended.)
    if lengthscales is None:
        lengthscales = 1. * np.ones((num_partials, 1))
        variances = 0.125 * np.ones((num_partials, 1))
        frequencies = 1. * (1. + np.arange(num_partials))
    len_l = []
    var_l = []
    freq_l = []
    for i in range(self.num_partials):
        len_l.append(Param(lengthscales[i], transforms.Logistic(0., 2.)))
        var_l.append(Param(variances[i], transforms.Logistic(0., 1.)))
        freq_l.append(Param(frequencies[i], transforms.positive))
    self.lengthscales = ParamList(len_l)
    self.variance = ParamList(var_l)
    self.frequency = ParamList(freq_l)
def __init__(self, input_dim, energy=np.asarray([1.]), frequency=np.asarray([2 * np.pi]),
             variance=1.0, features_as_params=False):
    """
    - input_dim is the dimension of the input to the kernel.
    - variance is the (initial) value for the variance parameter(s);
      if ARD=True, there is one variance per input.
    - active_dims is a list of length input_dim which controls which
      columns of X are used.
    """
    gpflow.kernels.Kern.__init__(self, input_dim, active_dims=None)
    self.num_features = len(frequency)
    self.variance = Param(variance, transforms.Logistic(0., 0.25))
    if features_as_params:
        energy_list = []
        frequency_list = []
        for i in range(energy.size):
            energy_list.append(Param(energy[i], transforms.positive))
            frequency_list.append(Param(frequency[i], transforms.positive))
        self.energy = ParamList(energy_list)
        self.frequency = ParamList(frequency_list)
    else:
        # Plain arrays: the features are treated as fixed, not trainable.
        self.energy = energy
        self.frequency = frequency
def __init__(self, input_dim, variance=1., lengthscales=None, energy=None,
             frequencies=None, len_fixed=True):
    gpflow.kernels.Kern.__init__(self, input_dim, active_dims=None)
    self.ARD = False
    # energy and frequencies must be provided as arrays of equal length,
    # one entry per partial; the None defaults are not usable directly.
    self.num_partials = len(energy)
    energy_l = []
    freq_l = []
    for i in range(self.num_partials):
        energy_l.append(Param(energy[i], transforms.positive))
        freq_l.append(Param(frequencies[i], transforms.positive))
    self.energy = ParamList(energy_l)
    self.frequency = ParamList(freq_l)
    self.variance = Param(variance, transforms.positive)
    self.lengthscales = Param(lengthscales, transforms.positive)
    self.vars_n_freqs_fixed(fix_energy=True, fix_freq=True)
    if len_fixed:
        self.lengthscales.fixed = True
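# --- Usage sketch (hypothetical) -------------------------------------------
# The constructors above all follow the same GPflow-0.x pattern: wrap one
# Param per partial in a ParamList so each hyperparameter can be constrained
# and fixed/freed independently. The class name `PartialsKernel` below is an
# assumption for illustration; substitute whichever class owns the first
# constructor above.
import numpy as np

energy = np.asarray([1., 0.5, 0.25])              # one energy per partial
frequency = 2 * np.pi * np.asarray([1., 2., 3.])  # harmonic frequencies

kern = PartialsKernel(input_dim=1, energy=energy, frequency=frequency,
                      variance=1., lengthscales=1., len_fixed=True)
kern.energy[0].fixed = True  # ParamList entries can be fixed individually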
def __init__(self, kern, q_mu, q_sqrt, Z, mean_function):
    Parameterized.__init__(self)
    nodes = []
    for n_kern, n_q_mu, n_q_s in zip(kern, q_mu, q_sqrt):
        nodes.append(Node(n_kern, n_q_mu, n_q_s, Z))
    self.nodes = ParamList(nodes)
    self.mean_function = mean_function
def __init__(self, operands, oper):
    """
    :param operands: list of acquisition objects
    :param oper: a tf.reduce operation (e.g., tf.reduce_sum) for aggregating
        the returned scores of each operand.
    """
    super(AcquisitionAggregation, self).__init__()
    assert all(isinstance(x, Acquisition) for x in operands)
    self.operands = ParamList(operands)
    self._oper = oper
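# --- Usage sketch (hypothetical) -------------------------------------------
# Aggregating two acquisitions in the GPflowOpt style, with tf.reduce_sum
# combining their scores. `ExpectedImprovement` and `model` are assumptions
# standing in for whatever operands the surrounding codebase defines.
import tensorflow as tf

joint_acq = AcquisitionAggregation([ExpectedImprovement(model),
                                    ExpectedImprovement(model)],
                                   tf.reduce_sum)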
def __init__(self, X, Y, inducing_points, final_inducing_points, hidden_units,
             units, share_inducing_inputs=True):
    Model.__init__(self)
    assert X.shape[0] == Y.shape[0]
    self.num_data, D_X = X.shape
    self.D_Y = 1
    self.num_samples = 100

    # Build one kernel list per layer: `units` kernels for each hidden
    # layer, a single kernel for the output layer.
    kernels = []
    for l in range(hidden_units + 1):
        ks = []
        D = units if l > 0 else D_X
        if l < hidden_units:
            for w in range(units):
                ks.append(RBF(D, lengthscales=1., variance=1.) + White(D, variance=1e-5))
        else:
            ks.append(RBF(D, lengthscales=1., variance=1.))
        kernels.append(ks)

    self.dims_in = [D_X] + [units] * hidden_units
    self.dims_out = [units] * hidden_units + [1]
    q_mus, q_sqrts, Zs, mean_functions = init_layers(
        X, self.dims_in, self.dims_out, inducing_points,
        final_inducing_points, share_inducing_inputs)

    layers = []
    for q_mu, q_sqrt, Z, mean_function, kernel in zip(q_mus, q_sqrts, Zs,
                                                      mean_functions, kernels):
        layers.append(Layer(kernel, q_mu, q_sqrt, Z, mean_function))
    self.layers = ParamList(layers)

    for layer in self.layers[:-1]:  # fix the inner layer mean functions
        layer.mean_function.fixed = True

    self.likelihood = Gaussian()

    minibatch_size = 10000 if X.shape[0] > 10000 else None
    if minibatch_size is not None:
        self.X = MinibatchData(X, minibatch_size)
        self.Y = MinibatchData(Y, minibatch_size)
    else:
        self.X = DataHolder(X)
        self.Y = DataHolder(Y)
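# --- Construction sketch (hypothetical) ------------------------------------
# The constructor above builds a fixed-width architecture: `hidden_units`
# hidden layers of `units` GPs each, plus a single-output final layer. The
# class name `DeepGP` is an assumption for illustration.
import numpy as np

X = np.random.rand(500, 4)
Y = np.random.rand(500, 1)
model = DeepGP(X, Y, inducing_points=50, final_inducing_points=100,
               hidden_units=2, units=3)  # widths: 4 -> 3 -> 3 -> 1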
def __init__(self, models=[], optimize_restarts=5):
    """
    :param models: list of GPflow models representing our beliefs about the problem
    :param optimize_restarts: number of optimization restarts to use when training the models
    """
    super(Acquisition, self).__init__()
    models = np.atleast_1d(models)
    assert all(isinstance(model, (Model, ModelWrapper)) for model in models)
    self._models = ParamList([DataScaler(m) for m in models])

    assert optimize_restarts >= 0
    self.optimize_restarts = optimize_restarts
    self._needs_setup = True
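# --- Usage sketch (hypothetical) -------------------------------------------
# Wrapping a trained GPflow model in an Acquisition subclass, GPflowOpt
# style. The GPR construction matches the GPflow 0.x API this code targets;
# `ExpectedImprovement` is an assumption for whichever subclass is in use.
import numpy as np
import gpflow

X = np.random.rand(20, 2)
Y = np.sin(X[:, :1]) + 0.1 * np.random.randn(20, 1)
gpr = gpflow.gpr.GPR(X, Y, kern=gpflow.kernels.RBF(2))
acq = ExpectedImprovement(gpr)
acq.optimize_restarts = 10  # retrain from 10 random restarts on data updates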
def _optimize_models(self):
    # Optimize model #1
    self.operands[0]._optimize_models()

    # Copy it again if needed due to changed free state
    if self._needs_new_copies:
        new_copies = [copy.deepcopy(self.operands[0]) for _ in range(len(self.operands) - 1)]
        for c in new_copies:
            c.optimize_restarts = 0
        self.operands = ParamList([self.operands[0]] + new_copies)
        self._needs_new_copies = False

    # Draw samples using HMC.
    # Sample each model of the acquisition function - results in a list of 2D ndarrays.
    hypers = np.hstack([model.sample(len(self.operands), **self._sample_opt)
                        for model in self.models])

    # Now visit all acquisition copies, and set state
    for idx, draw in enumerate(self.operands):
        draw.set_state(hypers[idx, :])
def __init__(self, X, Y, Z, kernels, likelihood, num_latent_Y=None,
             minibatch_size=None, num_samples=1, mean_function=Zero()):
    Model.__init__(self)
    assert X.shape[0] == Y.shape[0]
    assert Z.shape[1] == X.shape[1]
    assert kernels[0].input_dim == X.shape[1]

    self.num_data, D_X = X.shape
    self.num_samples = num_samples
    self.D_Y = num_latent_Y or Y.shape[1]
    self.dims = [k.input_dim for k in kernels] + [self.D_Y]

    q_mus, q_sqrts, Zs, mean_functions = init_layers(X, Z, self.dims, mean_function)
    layers = []
    for q_mu, q_sqrt, Z, mean_function, kernel in zip(q_mus, q_sqrts, Zs,
                                                      mean_functions, kernels):
        layers.append(Layer(kernel, q_mu, q_sqrt, Z, mean_function))
    self.layers = ParamList(layers)

    for layer in self.layers[:-1]:  # fix the inner layer mean functions
        layer.mean_function.fixed = True

    self.likelihood = likelihood

    if minibatch_size is not None:
        self.X = MinibatchData(X, minibatch_size)
        self.Y = MinibatchData(Y, minibatch_size)
    else:
        self.X = DataHolder(X)
        self.Y = DataHolder(Y)
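# --- Construction sketch (hypothetical) ------------------------------------
# Here the layer widths come from the kernel list: dims = [input_dim of each
# kernel] + [D_Y]. The class name `DGP` is an assumption for illustration.
import numpy as np
from gpflow.kernels import RBF
from gpflow.likelihoods import Gaussian

X = np.random.rand(100, 3)
Y = np.random.rand(100, 1)
Z = X[:10].copy()               # 10 inducing inputs, same dim as X
kernels = [RBF(3), RBF(3)]      # two layers: dims become [3, 3, 1]
model = DGP(X, Y, Z, kernels, Gaussian(), num_samples=5)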
def __init__(self, X_variational_mean, X_variational_var, Y, kern, t, kern_t, M, Z=None):
    """
    Initialization of the Bayesian Gaussian Process Dynamics Model. This method
    only works with a Gaussian likelihood.

    :param X_variational_mean: initial latent positions, size N (number of points) x Q (latent dimensions).
    :param X_variational_var: variance of latent positions (N x Q), for the initialisation of the latent space.
    :param Y: data matrix, size N (number of points) x D (dimensions).
    :param kern: kernel specification, by default RBF.
    :param t: time stamps.
    :param kern_t: dynamics kernel specification, by default RBF.
    :param M: number of inducing points.
    :param Z: matrix of inducing points, size M (inducing points) x Q (latent dimensions).
        By default a random permutation of X_variational_mean.
    """
    super(BayesianDGPLVM, self).__init__(name='BayesianDGPLVM')
    self.kern = kern
    assert len(X_variational_mean) == len(X_variational_var), 'must be same amount of time series'
    self.likelihood = likelihoods.Gaussian()

    # multiple sequences
    series = []
    for i in range(len(X_variational_mean)):
        series.append(GPTimeSeries(X_variational_mean[i], X_variational_var[i], t[i]))
    self.series = ParamList(series)

    # inducing points: by default, a random permutation of the initial
    # latent means, truncated to M points.
    if Z is None:
        Z = np.random.permutation(np.concatenate(X_variational_mean, axis=0).copy())[:M]
    else:
        assert Z.shape[0] == M
    self.Z = Param(Z)

    self.kern_t = kern_t
    self.Y = DataHolder(Y)
    self.M = M
    self.n_s = 0
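# --- Initialisation sketch ---------------------------------------------------
# Shapes follow the docstring above: each sequence contributes an N_i x Q
# latent mean/variance pair and N_i time stamps, with all observations
# stacked row-wise into Y. The kernel choices here are illustrative only.
import numpy as np
from gpflow.kernels import RBF

Q, D = 2, 5
X_mean = [np.random.randn(30, Q), np.random.randn(25, Q)]
X_var = [0.1 * np.ones((30, Q)), 0.1 * np.ones((25, Q))]
t = [np.linspace(0, 1, 30)[:, None], np.linspace(0, 1, 25)[:, None]]
Y = np.random.randn(55, D)      # 30 + 25 rows, stacked
m = BayesianDGPLVM(X_mean, X_var, Y, RBF(Q), t, RBF(1), M=20)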
def __init__(self, prev_ind_list, cur_ind_list, X_grid, kerns_list,
             name='collaborative_pref_gps'):
    Model.__init__(self, name)
    total_shape = total_all_actions(prev_ind_list)
    Y = np.ones(total_shape)[:, None]
    self.Y = DataHolder(Y)

    # Use a ParamList to hold the kernels of the latent GPs H.
    self.kerns_list = ParamList(kerns_list)
    self.X_grid = DataHolder(X_grid[:, None])
    self.prev_ind_list = prev_ind_list
    self.cur_ind_list = cur_ind_list

    # define likelihood
    self.likelihood = gpflow.likelihoods.Bernoulli()