Example #1
0
    def fit(
        self,
        X,
        Y,
        sampler="variational",
        tune=500,
        draws=500,
        vi_params=None,
        **kwargs,
    ):
        """
        Fit a nested logit model on the provided set of queries X and choices Y of those objects. The provided
        queries and corresponding preferences are of a fixed size (numpy arrays). For learning this network the
        categorical cross entropy loss function for each object :math:`x_i \\in Q` is defined as:

        .. math::

            C_{i} =  -y(i)\\log(P_i) \\enspace,

        where :math:`y` is ground-truth discrete choice vector of the objects in the given query set :math:`Q`.
        The value :math:`y(i) = 1` if object :math:`x_i` is chosen else :math:`y(i) = 0`.

        Parameters
        ----------
        X : numpy array (n_instances, n_objects, n_features)
            Feature vectors of the objects
        Y : numpy array (n_instances, n_objects)
            Choices for given objects in the query
        sampler : {'variational', 'metropolis', 'nuts'}, string
            The sampler used to estimate the posterior mean and mass matrix from the trace

                * **variational** : Run inference methods to estimate posterior mean and diagonal mass matrix
                * **metropolis** : Use the MAP as starting point and Metropolis-Hastings sampler
                * **nuts** : Use the No-U-Turn sampler
        vi_params : dict or None
            The parameters for the **variational** inference method. If None (the default), it is set to
            ``{"n": 20000, "method": "advi", "callbacks": [CheckParametersConvergence()]}``
        draws : int
            The number of samples to draw. Defaults to 500. The number of tuned samples are discarded by default
        tune : int
            Number of iterations to tune, defaults to 500. Ignored when using 'SMC'. Samplers adjust
            the step sizes, scalings or similar during tuning. Tuning samples will be drawn in addition
            to the number specified in the `draws` argument, and will be discarded unless
            `discard_tuned_samples` is set to False.
        **kwargs :
            Keyword arguments for the fit function of :meth:`pymc3.fit` or :meth:`pymc3.sample`
        """
        # Build the default per call rather than using a mutable default argument:
        # a dict default is created once at definition time, so callers would share
        # (and could mutate) one dict — and one CheckParametersConvergence instance —
        # across every fit() invocation.
        if vi_params is None:
            vi_params = {
                "n": 20000,
                "method": "advi",
                "callbacks": [CheckParametersConvergence()],
            }
        self._pre_fit()
        _n_instances, self.n_objects_fit_, self.n_object_features_fit_ = X.shape
        # Default the number of nests to half the number of objects when unspecified.
        if self.n_nests is None:
            self.n_nests = int(self.n_objects_fit_ / 2)
        self.random_state_ = check_random_state(self.random_state)
        self.construct_model(X, Y)
        fit_pymc3_model(self, sampler, draws, tune, vi_params, **kwargs)
        return self
 def _fit(self,
          X,
          Y,
          sampler='variational',
          tune=500,
          draws=500,
          vi_params=None,
          **kwargs):
     """
     Build the pymc3 model for the given data and fit it with the chosen sampler.

     Parameters
     ----------
     X : numpy array (n_instances, n_objects, n_features)
         Feature vectors of the objects
     Y : numpy array (n_instances, n_objects)
         Choices for given objects in the query
     sampler : {'variational', 'metropolis', 'nuts'}, string
         The sampler used to estimate the posterior mean and mass matrix from the trace
     tune : int
         Number of iterations to tune the sampler, defaults to 500
     draws : int
         The number of samples to draw, defaults to 500
     vi_params : dict or None
         The parameters for the variational inference method. If None (the default), it is set to
         ``{"n": 20000, "method": "advi", "callbacks": [CheckParametersConvergence()]}``
     **kwargs :
         Keyword arguments passed on to the pymc3 fit/sample function
     """
     # Avoid a mutable default argument: construct the default dict per call so
     # invocations cannot share (and mutate) a single definition-time dict.
     if vi_params is None:
         vi_params = {
             "n": 20000,
             "method": "advi",
             "callbacks": [CheckParametersConvergence()]
         }
     self.construct_model(X, Y)
     fit_pymc3_model(self, sampler, draws, tune, vi_params, **kwargs)
Example #3
0
 def _fit(
     self,
     X,
     Y,
     sampler="variational",
     tune=500,
     draws=500,
     vi_params=None,
     **kwargs,
 ):
     """
     Record the data dimensions, build the pymc3 model, and fit it with the chosen sampler.

     Parameters
     ----------
     X : numpy array (n_instances, n_objects, n_features)
         Feature vectors of the objects
     Y : numpy array (n_instances, n_objects)
         Choices for given objects in the query
     sampler : {'variational', 'metropolis', 'nuts'}, string
         The sampler used to estimate the posterior mean and mass matrix from the trace
     tune : int
         Number of iterations to tune the sampler, defaults to 500
     draws : int
         The number of samples to draw, defaults to 500
     vi_params : dict or None
         The parameters for the variational inference method. If None (the default), it is set to
         ``{"n": 20000, "method": "advi", "callbacks": [CheckParametersConvergence()]}``
     **kwargs :
         Keyword arguments passed on to the pymc3 fit/sample function
     """
     # Avoid a mutable default argument: construct the default dict per call so
     # invocations cannot share (and mutate) a single definition-time dict.
     if vi_params is None:
         vi_params = {
             "n": 20000,
             "method": "advi",
             "callbacks": [CheckParametersConvergence()],
         }
     _n_instances, self.n_objects_fit_, self.n_object_features_fit_ = X.shape
     self.construct_model(X, Y)
     fit_pymc3_model(self, sampler, draws, tune, vi_params, **kwargs)