Code example #1
    def construct_model(self, X, Y):
        y_nests = self.create_nests(X)
        with pm.Model() as self.model:
            # Shared variables so the data can be swapped without rebuilding the graph.
            self.Xt = theano.shared(X)
            self.Yt = theano.shared(Y)
            self.y_nests = theano.shared(y_nests)
            shapes = {
                'weights': self.n_object_features,
                'weights_k': self.n_object_features
            }

            # Priors on the object-level and nest-level weight vectors.
            weights_dict = create_weight_dictionary(self.model_priors, shapes)
            # Uniform prior on the nest-specific scale parameters lambda_k.
            lambda_k = pm.Uniform('lambda_k',
                                  self.alpha,
                                  1.0,
                                  shape=self.n_nests)
            # Object weights are rescaled per nest before the utilities are evaluated.
            weights = (weights_dict['weights'] / lambda_k[:, None])
            utility = self.eval_utility(weights)
            utility_k = tt.dot(self.features_nests, weights_dict['weights_k'])
            self.p = self.get_probability(utility, lambda_k, utility_k)

            yl = LogLikelihood('yl',
                               loss_func=self.loss_function,
                               p=self.p,
                               observed=self.Yt)
        self.logger.info("Model construction completed")
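For reference, the quantities built above plug into the standard (textbook) nested logit choice probability. Whether get_probability evaluates exactly this form is an assumption, but the roles of utility, utility_k and lambda_k line up with it; note that the code divides the object weights by \lambda_k up front, so utility already corresponds to U_i / \lambda_k below.

P(x_i \mid Q) =
    \underbrace{\frac{\exp(U_i / \lambda_k)}{\sum_{x_j \in B_k} \exp(U_j / \lambda_k)}}_{P(x_i \mid B_k)}
    \cdot
    \underbrace{\frac{\exp(W_k + \lambda_k I_k)}{\sum_{l=1}^{K} \exp(W_l + \lambda_l I_l)}}_{P(B_k)},
\qquad
I_k = \log \sum_{x_j \in B_k} \exp(U_j / \lambda_k)

where B_k is the nest containing x_i, U_i = w^\top x_i is the object utility, W_k (utility_k in the code) is the nest utility, and I_k is the inclusive value of nest B_k.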
Code example #2
    def construct_model(self, X, Y):
        """
        Constructs the nested logit model by applying priors on weight vectors **weights** and **weights_k** as per
        :meth:`model_configuration`. Then we apply a uniform prior to the :math:`\\lambda s`, i.e.
        :math:`\\lambda s \\sim Uniform(\\text{alpha}, 1.0)`.The probability of choosing the object :math:`x_i` from
        the query set :math:`Q = \\{x_1, \\ldots ,x_n\\}` is evaluated in :meth:`get_probabilities`.

        Parameters
        ----------
        X : numpy array
            (n_instances, n_objects, n_features)
            Feature vectors of the objects
        Y : numpy array
            (n_instances, n_objects)
            Preferences in the form of discrete choices for given objects

        Returns
        -------
         model : pymc3 Model :class:`pm.Model`
        """
        self.loss_function_ = likelihood_dict.get(self.loss_function, None)
        self.threshold_ = 5e6
        self.trace_ = None
        self.trace_vi_ = None
        # Subsample the training data when the total number of feature entries would exceed the threshold.
        if np.prod(X.shape) > self.threshold_:
            upper_bound = int(self.threshold_ / np.prod(X.shape[1:]))
            indices = self.random_state_.choice(X.shape[0],
                                                upper_bound,
                                                replace=False)
            X = X[indices, :, :]
            Y = Y[indices, :]
        logger.info(
            "Train Set instances {} objects {} features {}".format(*X.shape))
        y_nests = self.create_nests(X)
        with pm.Model() as self.model:
            self.Xt_ = theano.shared(X)
            self.Yt_ = theano.shared(Y)
            self.y_nests_ = theano.shared(y_nests)
            shapes = {
                "weights": self.n_object_features_fit_,
                "weights_k": self.n_object_features_fit_,
            }

            weights_dict = create_weight_dictionary(self.model_configuration,
                                                    shapes)
            lambda_k = pm.Uniform("lambda_k",
                                  self.alpha,
                                  1.0,
                                  shape=self.n_nests)
            weights = weights_dict["weights"] / lambda_k[:, None]
            utility = self._eval_utility(weights)
            utility_k = tt.dot(self.features_nests_, weights_dict["weights_k"])
            self.p_ = self.get_probabilities(utility, lambda_k, utility_k)

            LogLikelihood("yl",
                          loss_func=self.loss_function_,
                          p=self.p_,
                          observed=self.Yt_)
        logger.info("Model construction completed")
Code example #3
    def construct_model(self, X, Y):
        """
            Constructs the nested logit model by applying priors on weight vectors **weights** and **weights_k** as per
            :meth:`model_configuration`. Then we apply a uniform prior to the :math:`\lambda s`, i.e.
            :math:`\lambda s \sim Uniform(\\text{alpha}, 1.0)`.The probability of choosing the object :math:`x_i` from
            the query set :math:`Q = \{x_1, \ldots ,x_n\}` is evaluated in :meth:`get_probabilities`.

            Parameters
            ----------
            X : numpy array
                (n_instances, n_objects, n_features)
                Feature vectors of the objects
            Y : numpy array
                (n_instances, n_objects)
                Preferences in the form of discrete choices for given objects

            Returns
            -------
             model : pymc3 Model :class:`pm.Model`
        """
        # Subsample the training data when the total number of feature entries would exceed the threshold.
        if np.prod(X.shape) > self.threshold:
            upper_bound = int(self.threshold / np.prod(X.shape[1:]))
            indices = self.random_state.choice(X.shape[0], upper_bound,
                                               replace=False)
            X = X[indices, :, :]
            Y = Y[indices, :]
        self.logger.info(
            "Train Set instances {} objects {} features {}".format(*X.shape))
        y_nests = self.create_nests(X)
        with pm.Model() as self.model:
            self.Xt = theano.shared(X)
            self.Yt = theano.shared(Y)
            self.y_nests = theano.shared(y_nests)
            shapes = {
                'weights': self.n_object_features,
                'weights_k': self.n_object_features
            }

            weights_dict = create_weight_dictionary(self.model_configuration,
                                                    shapes)
            lambda_k = pm.Uniform('lambda_k',
                                  self.alpha,
                                  1.0,
                                  shape=self.n_nests)
            weights = (weights_dict['weights'] / lambda_k[:, None])
            utility = self._eval_utility(weights)
            utility_k = tt.dot(self.features_nests, weights_dict['weights_k'])
            self.p = self.get_probabilities(utility, lambda_k, utility_k)

            yl = LogLikelihood('yl',
                               loss_func=self.loss_function,
                               p=self.p,
                               observed=self.Yt)
        self.logger.info("Model construction completed")