def __init__(self, data, config, params, *args, **kwargs):
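        """

        Create a Deep Matrix Factorization (DMF) instance.

        Args:
            data: data loader object
            params: model parameters {lr: learning rate,
                                      [user_mlp, item_mlp]: tower layer sizes,
                                      neg_ratio: negative sampling ratio,
                                      reg: regularization,
                                      similarity: similarity measure}
        """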
        self._random = np.random

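        # Each tuple is assumed to follow the layout consumed by autoset_params():
        # (attribute name, config key, short name, default, parsing function, name-formatting function).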
        self._params_list = [
            ("_learning_rate", "lr", "lr", 0.0001, None, None),
            ("_user_mlp", "user_mlp", "umlp", "(64,32)",
             lambda x: list(make_tuple(str(x))),
             lambda x: self._batch_remove(str(x), " []").replace(",", "-")),
            ("_item_mlp", "item_mlp", "imlp", "(64,32)",
             lambda x: list(make_tuple(str(x))),
             lambda x: self._batch_remove(str(x), " []").replace(",", "-")),
            ("_neg_ratio", "neg_ratio", "negratio", 5, None, None),
            ("_reg", "reg", "reg", 0.001, None, None),
            ("_similarity", "similarity", "sim", "cosine", None, None)
        ]
        self.autoset_params()

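        # Maximum observed rating (the DMF loss is assumed to normalize ratings by it)
        # and the number of samples drawn per epoch: every positive interaction
        # plus `neg_ratio` sampled negatives.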
        self._max_ratings = np.max(self._data.sp_i_train_ratings)
        self._transactions_per_epoch = self._data.transactions * (1 + self._neg_ratio)

        if self._batch_size < 1:
            self._batch_size = self._transactions_per_epoch

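        # Pointwise sampler (pws): assumed to draw `neg_ratio` unobserved items
        # as negatives for every observed interaction.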
        self._sampler = pws.Sampler(self._data.i_train_dict,
                                    self._data.sp_i_train_ratings,
                                    self._neg_ratio)

        self._ratings = self._data.train_dict
        self._sp_i_train = self._data.sp_i_train
        self._i_items_set = list(range(self._num_items))

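        # Two-tower architecture: the `user_mlp` and `item_mlp` towers embed the
        # user and item interaction vectors, which are compared with `similarity`.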
        self._model = DeepMatrixFactorizationModel(
            self._num_users, self._num_items, self._user_mlp, self._item_mlp,
            self._reg, self._similarity, self._max_ratings,
            self._data.sp_i_train_ratings, self._learning_rate)

def __init__(self, data, config, params, *args, **kwargs):
        """

        Create a NAIS instance.
        (see https://arxiv.org/pdf/1809.07053.pdf for details about the algorithm design choices).

        """
        self._random = np.random

        self._params_list = [
            ("_factors", "factors", "factors", 100, None, None),
            ("_algorithm", "algorithm", "algorithm", "concat", None, None),
            ("_weight_size", "weight_size", "weight_size", 32, None, None),
            ("_lr", "lr", "lr", 0.001, None, None),
            ("_l_w", "l_w", "l_w", 0.001, None, None),
            ("_l_b", "l_b", "l_b", 0.001, None, None),
            ("_alpha", "alpha", "alpha", 0.5, lambda x: min(max(0, x), 1),
             None), ("_beta", "beta", "beta", 0.5, None, None),
            ("_neg_ratio", "neg_ratio", "neg_ratio", 0.5, None, None)
        ]
        self.autoset_params()

        if self._batch_size < 1:
            self._batch_size = self._data.transactions

        self._ratings = self._data.train_dict

        self._sampler = pws.Sampler(self._data.i_train_dict,
                                    self._data.sp_i_train_ratings,
                                    self._neg_ratio)

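        # Neural attentive item-similarity model: pairwise item-embedding
        # interactions ('concat' or 'prod', per `algorithm`) are weighted by an
        # attention network of width `weight_size`.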
        self._model = NAIS_model(self._data, self._algorithm,
                                 self._weight_size, self._factors, self._lr,
                                 self._l_w, self._l_b, self._alpha, self._beta,
                                 self._num_users, self._num_items)

def __init__(self, data, config, params, *args, **kwargs):
        """

        Create a FISM instance.
        (see http://glaros.dtc.umn.edu/gkhome/node/1068 for details about the algorithm design choices).

        Args:
            data: data loader object
            params: model parameters {_factors: embedding size,
                                      [l_w, l_b]: regularization,
                                      lr: learning rate,
                                      alpha: aggregation normalization exponent in [0, 1],
                                      neg_ratio: negative sampling ratio}
        """
        self._random = np.random

        self._params_list = [
            ("_factors", "factors", "factors", 100, None, None),
            ("_lr", "lr", "lr", 0.001, None, None),
            ("_l_w", "l_w", "l_w", 0.001, None, None),
            ("_l_b", "l_b", "l_b", 0.001, None, None),
            ("_alpha", "alpha", "alpha", 0.5, lambda x: min(max(0, x), 1),
             None),
            ("_neg_ratio", "neg_ratio", "neg_ratio", 0.5, None, None),
        ]
        self.autoset_params()

        if self._batch_size < 1:
            self._batch_size = self._data.transactions

        self._ratings = self._data.train_dict

        self._sampler = pws.Sampler(self._data.i_train_dict,
                                    self._data.sp_i_train_ratings,
                                    self._neg_ratio)

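        # FISM scores a target item against the aggregated embeddings of the
        # user's other consumed items (no attention), with `alpha` controlling
        # the normalization of that aggregation.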
        self._model = FISM_model(self._data, self._factors, self._lr,
                                 self._l_w, self._l_b, self._alpha,
                                 self._num_users, self._num_items)