Esempio n. 1
0
    def construct_model(self, X, Y):
        """
            Build the nested logit model and store it in ``self.model``.

            Priors for the weight tensors **weights** and **weights_ik** come from
            :meth:`model_configuration`; each nest parameter :math:`\\lambda_k` gets a
            :math:`Uniform(\\text{alpha}, 1.0)` prior. The choice probabilities for the
            objects of a query set are produced by :meth:`get_probabilities`.

            Parameters
            ----------
            X : numpy array
                (n_instances, n_objects, n_features)
                Feature vectors of the objects
            Y : numpy array
                (n_instances, n_objects)
                Preferences in the form of discrete choices for given objects

            Returns
            -------
             model : pymc3 Model :class:`pm.Model`
        """
        # Reset any state left over from a previous fit.
        self.trace_ = None
        self.trace_vi_ = None
        self.random_state_ = check_random_state(self.random_state)
        self.loss_function_ = likelihood_dict.get(self.loss_function, None)
        self.threshold_ = 43e5
        # If the full tensor exceeds the element budget, train on a random
        # subset of instances so that total size stays under the threshold.
        if np.prod(X.shape) > self.threshold_:
            n_keep = int(self.threshold_ / np.prod(X.shape[1:]))
            keep = self.random_state_.choice(X.shape[0], n_keep, replace=False)
            X, Y = X[keep, :, :], Y[keep, :]
        logger.info(
            "Train Set instances {} objects {} features {}".format(*X.shape))
        with pm.Model() as self.model:
            self.Xt_ = theano.shared(X)
            self.Yt_ = theano.shared(Y)
            shapes = {
                "weights": self.n_object_features_fit_,
                "weights_ik": (self.n_object_features_fit_, self.n_nests),
            }
            weights_dict = create_weight_dictionary(self.model_configuration,
                                                    shapes)
            # Soft nest-membership scores per object, normalized over nests.
            alpha_ik = ttu.softmax(
                tt.dot(self.Xt_, weights_dict["weights_ik"]), axis=2)
            utility = tt.dot(self.Xt_, weights_dict["weights"])
            lambda_k = pm.Uniform(
                "lambda_k", self.alpha, 1.0, shape=self.n_nests)
            self.p_ = self.get_probabilities(utility, lambda_k, alpha_ik)
            LogLikelihood("yl",
                          loss_func=self.loss_function_,
                          p=self.p_,
                          observed=self.Yt_)
        logger.info("Model construction completed")
Esempio n. 2
0
 def construct_model(self, X, Y):
     """Build the mixed logit model in ``self.model``: mixture-specific linear
     utilities, softmax over objects, averaged across mixture components."""
     with pm.Model() as self.model:
         self.Xt = theano.shared(X)
         self.Yt = theano.shared(Y)
         # One weight vector per mixture component.
         shapes = {'weights': (self.n_object_features, self.n_mixtures)}
         weights_dict = create_weight_dictionary(self.model_priors, shapes)
         utility = tt.dot(self.Xt, weights_dict['weights'])
         # Softmax over objects (axis=1) per component, then mean over components (axis=2).
         self.p = tt.mean(ttu.softmax(utility, axis=1), axis=2)
         yl = LogLikelihood(
             'yl', loss_func=self.loss_function, p=self.p, observed=self.Yt)
     self.logger.info("Model construction completed")
Esempio n. 3
0
    def construct_model(self, X, Y):
        """
            Build the nested logit model and store it in ``self.model``.

            Weight tensors **weights** and **weights_ik** receive the priors given
            by :meth:`model_configuration`; the nest parameters :math:`\\lambda_k`
            receive a :math:`Uniform(\\text{alpha}, 1.0)` prior. Object choice
            probabilities are computed by :meth:`get_probabilities`.

            Parameters
            ----------
            X : numpy array
                (n_instances, n_objects, n_features)
                Feature vectors of the objects
            Y : numpy array
                (n_instances, n_objects)
                Preferences in the form of discrete choices for given objects

            Returns
            -------
             model : pymc3 Model :class:`pm.Model`
        """
        # Subsample instances when the full tensor exceeds the element budget.
        if np.prod(X.shape) > self.threshold:
            n_instances = int(self.threshold / np.prod(X.shape[1:]))
            chosen = self.random_state.choice(
                X.shape[0], n_instances, replace=False)
            X, Y = X[chosen, :, :], Y[chosen, :]
        self.logger.info(
            "Train Set instances {} objects {} features {}".format(*X.shape))
        with pm.Model() as self.model:
            self.Xt = theano.shared(X)
            self.Yt = theano.shared(Y)
            shapes = {
                'weights': self.n_object_features,
                'weights_ik': (self.n_object_features, self.n_nests)
            }
            weights_dict = create_weight_dictionary(self.model_configuration,
                                                    shapes)
            # Soft nest-membership scores per object, normalized over nests.
            alpha_ik = ttu.softmax(
                tt.dot(self.Xt, weights_dict['weights_ik']), axis=2)
            utility = tt.dot(self.Xt, weights_dict['weights'])
            lambda_k = pm.Uniform(
                'lambda_k', self.alpha, 1.0, shape=self.n_nests)
            self.p = self.get_probabilities(utility, lambda_k, alpha_ik)
            yl = LogLikelihood(
                'yl', loss_func=self.loss_function, p=self.p, observed=self.Yt)
        self.logger.info("Model construction completed")
Esempio n. 4
0
    def construct_model(self, X, Y):
        """Build the multinomial logit model in ``self.model``: linear utility
        per object plus a shared intercept, softmax over the query set."""
        self.logger.info('Creating model_args config {}'.format(
            print_dictionary(self.model_priors)))
        with pm.Model() as self.model:
            self.Xt = theano.shared(X)
            self.Yt = theano.shared(Y)
            weights_dict = create_weight_dictionary(
                self.model_args, {'weights': self.n_object_features})
            intercept = pm.Normal('intercept', mu=0, sd=10)
            utility = tt.dot(self.Xt, weights_dict['weights']) + intercept
            # Choice probabilities: softmax across the objects of each instance.
            self.p = ttu.softmax(utility, axis=1)
            yl = LogLikelihood(
                'yl', loss_func=self.loss_function, p=self.p, observed=self.Yt)
        self.logger.info("Model construction completed")
Esempio n. 5
0
    def construct_model(self, X, Y):
        """
            Build the multinomial logit model and store it in ``self.model``.

            The utility of an object is the linear score :math:`U(x) = w \\cdot x`
            plus a shared intercept, and the probability of choosing :math:`x_i`
            from the query set :math:`Q = \\{x_1, \\ldots ,x_n\\}` is:

            .. math::

                P_i = P(x_i \\lvert Q) = \\frac{exp(U(x_i))}{\\sum_{x_j \\in Q} exp(U(x_j))}

            Parameters
            ----------
            X : numpy array
                (n_instances, n_objects, n_features)
                Feature vectors of the objects
            Y : numpy array
                (n_instances, n_objects)
                Preferences in the form of discrete choices for given objects

            Returns
            -------
             model : pymc3 Model :class:`pm.Model`
        """
        # Reset state from any previous fit.
        self.trace_ = None
        self.trace_vi_ = None
        self.loss_function_ = likelihood_dict.get(self.loss_function, None)
        logger.info("Creating model_args config {}".format(
            print_dictionary(self.model_configuration)))
        with pm.Model() as self.model:
            self.Xt_ = theano.shared(X)
            self.Yt_ = theano.shared(Y)
            weights_dict = create_weight_dictionary(
                self.model_configuration,
                {"weights": self.n_object_features_fit_})
            intercept = pm.Normal("intercept", mu=0, sd=10)
            utility = tt.dot(self.Xt_, weights_dict["weights"]) + intercept
            # Choice probabilities: softmax across the objects of each instance.
            self.p_ = ttu.softmax(utility, axis=1)
            LogLikelihood("yl",
                          loss_func=self.loss_function_,
                          p=self.p_,
                          observed=self.Yt_)
        logger.info("Model construction completed")
Esempio n. 6
0
    def construct_model(self, X, Y):
        """
            Constructs the mixed logit model by applying priors on weight vectors **weights** as per
            :meth:`model_configuration`. The probability of choosing the object :math:`x_i` from the query set
            :math:`Q = \\{x_1, \\ldots ,x_n\\}` assuming we draw :math:`R` samples of the weight vectors is:

            .. math::

                P(x_i \\lvert Q) = \\frac{1}{R} \\sum_{r=1}^R \\frac{exp(U_r(x_i))}{\\sum_{x_j \\in Q} exp(U_r(x_j))}

            Parameters
            ----------
            X : numpy array
                (n_instances, n_objects, n_features)
                Feature vectors of the objects
            Y : numpy array
                (n_instances, n_objects)
                Preferences in the form of discrete choices for given objects

            Returns
            -------
             model : pymc3 Model :class:`pm.Model`
        """
        # Reset fitted state. NOTE: fixed `self.trace_vi` -> `self.trace_vi_` so the
        # attribute follows the trailing-underscore convention used by every other
        # fitted attribute here (trace_, loss_function_, Xt_, Yt_, p_).
        self.trace_ = None
        self.trace_vi_ = None
        self.loss_function_ = likelihood_dict.get(self.loss_function, None)
        with pm.Model() as self.model:
            self.Xt_ = theano.shared(X)
            self.Yt_ = theano.shared(Y)
            # One weight vector per mixture component.
            shapes = {
                "weights": (self.n_object_features_fit_, self.n_mixtures)
            }
            weights_dict = create_weight_dictionary(self.model_configuration,
                                                    shapes)
            utility = tt.dot(self.Xt_, weights_dict["weights"])
            # Softmax over objects per mixture component, then average over components.
            self.p_ = tt.mean(ttu.softmax(utility, axis=1), axis=2)
            LogLikelihood("yl",
                          loss_func=self.loss_function_,
                          p=self.p_,
                          observed=self.Yt_)
        logger.info("Model construction completed")
    def construct_model(self, X, Y):
        """Build the nested logit model in ``self.model``: nest-membership scores,
        linear utilities, and per-nest ``lambda_k`` with a Uniform(alpha, 1) prior;
        probabilities come from :meth:`get_probabilities`."""
        with pm.Model() as self.model:
            self.Xt = theano.shared(X)
            self.Yt = theano.shared(Y)
            shapes = {
                'weights': self.n_object_features,
                'weights_ik': (self.n_object_features, self.n_nests)
            }
            weights_dict = create_weight_dictionary(self.model_priors, shapes)
            # Soft nest-membership scores per object, normalized over nests.
            alpha_ik = ttu.softmax(
                tt.dot(self.Xt, weights_dict['weights_ik']), axis=2)
            utility = tt.dot(self.Xt, weights_dict['weights'])
            lambda_k = pm.Uniform(
                'lambda_k', self.alpha, 1.0, shape=self.n_nests)
            self.p = self.get_probabilities(utility, lambda_k, alpha_ik)
            yl = LogLikelihood(
                'yl', loss_func=self.loss_function, p=self.p, observed=self.Yt)
        self.logger.info("Model construction completed")