Example #1
0
 def __init__(self, operands, oper):
     """
     Aggregate several acquisition functions into a single one.

     :param operands: list of acquisition objects
     :param oper: a tf.reduce operation (e.g., tf.reduce_sum) used to
         aggregate the scores returned by each operand.
     """
     super(AcquisitionAggregation, self).__init__()
     # Every operand must itself be an Acquisition.
     assert all(isinstance(operand, Acquisition) for operand in operands)
     self.operands = ParamList(operands)
     self._oper = oper
     self.setup()
Example #2
0
 def __init__(self, list_of_means):
     """
     Combine several mean functions; the combined output dimension is the
     sum of the operands' output dimensions.

     :param list list_of_means: A list of MeanFunction object.
     """
     total_dim = sum(mean.output_dim for mean in list_of_means)
     MeanFunction.__init__(self, total_dim)
     # The operand MeanFunctions are stored as a ParamList.
     self.mean_list = ParamList(list_of_means)
Example #3
0
 def __init__(self, list_of_kerns):
     """
     Combine several kernels; the combined output dimension is the sum of
     the operands' output dimensions.

     :param list list_of_kerns: A list of Kernel object.
     """
     # Each operand must be a Kern object.
     assert all(isinstance(k, Kern) for k in list_of_kerns)
     total_dim = sum(k.output_dim for k in list_of_kerns)
     # Initialise both base classes (Kern and kernels.Kern) explicitly.
     Kern.__init__(self, total_dim)
     kernels.Kern.__init__(self, input_dim=None)
     # kernels are stored as ParamList
     self.kern_list = ParamList(list_of_kerns)
Example #4
0
    def __init__(self, models=None, optimize_restarts=5):
        """
        :param models: list of GPflow models representing our beliefs about
            the problem; defaults to an empty list when omitted.
        :param optimize_restarts: number of optimization restarts to use when training the models
        """
        super(Acquisition, self).__init__()
        # Use a None sentinel instead of a mutable [] default argument
        # (a shared list default persists across calls/instances).
        models = [] if models is None else models
        models = np.atleast_1d(models)
        assert all(
            isinstance(model, (Model, ModelWrapper)) for model in models)
        self._models = ParamList([DataScaler(m) for m in models])

        assert (optimize_restarts >= 0)
        self.optimize_restarts = optimize_restarts
        self._optimize_models()
Example #5
0
    def __init__(self, models=None, optimize_restarts=5):
        """
        :param models: list of GPflow models representing our beliefs about
            the problem; defaults to an empty list when omitted.
        :param optimize_restarts: number of optimization restarts to use when training the models
        """
        super(Acquisition, self).__init__()
        # Use a None sentinel instead of a mutable [] default argument
        # (a shared list default persists across calls/instances).
        models = [] if models is None else models
        self._models = ParamList(
            [DataScaler(m) for m in np.atleast_1d(models).tolist()])
        # Snapshot each wrapped model's free state at construction time.
        self._default_params = list(
            map(lambda m: m.get_free_state(), self._models))

        assert (optimize_restarts >= 0)
        self.optimize_restarts = optimize_restarts
        self._optimize_models()
Example #6
0
    def __init__(self,
                 X,
                 Y,
                 Z,
                 kernels,
                 likelihood,
                 num_latent_Y=None,
                 minibatch_size=None,
                 num_samples=1,
                 mean_function=Zero()):
        """
        Construct a layered model: one Layer per kernel, initialised from
        init_layers, with a likelihood attached to the final output.

        :param X: input data array of shape (num_data, D_X).
        :param Y: output data; must have the same number of rows as X.
        :param Z: inducing inputs; must have the same number of columns as X.
        :param kernels: list of kernel objects, one per layer;
            kernels[0].input_dim must equal the number of input columns.
        :param likelihood: likelihood object stored on the model.
        :param num_latent_Y: number of latent output dimensions; falls back
            to Y.shape[1] when None (or any falsy value, including 0).
        :param minibatch_size: when given, X and Y are wrapped in
            MinibatchData; otherwise in DataHolder.
        :param num_samples: number of samples (stored on the instance).
        :param mean_function: mean function passed to init_layers.
            NOTE(review): Zero() is evaluated once at def time and shared
            across all instances — confirm it is stateless/safe to share.
        """
        Model.__init__(self)

        # Shape sanity checks: data rows align, and the inducing inputs and
        # first kernel live in the same input space as X.
        assert X.shape[0] == Y.shape[0]
        assert Z.shape[1] == X.shape[1]
        assert kernels[0].input_dim == X.shape[1]

        self.num_data, D_X = X.shape
        self.num_samples = num_samples
        # `or` makes num_latent_Y=0 also fall through to Y.shape[1].
        self.D_Y = num_latent_Y or Y.shape[1]

        # Per-layer dimensions: each kernel's input_dim plus the final width.
        self.dims = [k.input_dim for k in kernels] + [
            self.D_Y,
        ]
        q_mus, q_sqrts, Zs, mean_functions = init_layers(
            X, Z, self.dims, mean_function)

        # Build one Layer per kernel from the initialised parameters.
        layers = []
        for q_mu, q_sqrt, Z, mean_function, kernel in zip(
                q_mus, q_sqrts, Zs, mean_functions, kernels):
            layers.append(Layer(kernel, q_mu, q_sqrt, Z, mean_function))
        self.layers = ParamList(layers)

        for layer in self.layers[:-1]:  # fix the inner layer mean functions
            layer.mean_function.fixed = True

        self.likelihood = likelihood

        # Minibatched data holders when a minibatch size was requested,
        # plain data holders otherwise.
        if minibatch_size is not None:
            self.X = MinibatchData(X, minibatch_size)
            self.Y = MinibatchData(Y, minibatch_size)
        else:
            self.X = DataHolder(X)
            self.Y = DataHolder(Y)
Example #7
0
 def models(self):
     """Return a ParamList holding the models of every operand, in operand order."""
     collected = []
     for acquisition in self.operands:
         collected.extend(acquisition.models.sorted_params)
     return ParamList(collected)