def __init__(self, max_rank, lmbda, max_iter=200, verbose=False):
    """Store the solver's hyper-parameters.

    :param max_rank: maximum rank used by the solver
    :param lmbda: regularization strength
    :param max_iter: maximum number of iterations
    :param verbose: print progress information when True
    """
    # Initialize the surprise base class before anything else.
    AlgoBase.__init__(self)
    self.verbose = verbose
    self.max_iter = max_iter
    self.lmbda = lmbda
    self.max_rank = max_rank
def __init__(self, train_data):
    """Build the candidate model pool and the full trainset.

    :param train_data: a surprise dataset; its full trainset is built
        and stored on the instance.
    """
    AlgoBase.__init__(self)
    # Build each candidate model separately, then collect them as
    # (label, model) pairs — same contents and order as before.
    baseline = BaselineOnly(bsl_options={
        'method': 'als',
        'n_epochs': 25,
        'reg_u': 5,
        'reg_i': 3
    })
    svd = SVD(lr_all=0.01, n_epochs=25, reg_all=0.2)
    co_clustering = CoClustering(n_epochs=3, n_cltr_u=3, n_cltr_i=3)
    knn = KNNBasic(k=40, sim_options={
        'name': 'cosine',
        'user_based': False
    })
    self.model_selection = [
        ['baselineonly', baseline],
        ['svd', svd],
        ['coClustering', co_clustering],
        ['knn', knn],
    ]
    # Per-model results, filled in later.
    self.model_rmse = {}
    self.model_list = {}
    self.trainset = train_data.build_full_trainset()
def __init__(self, algorithms, weights):
    """Hybrid recommender that blends several algorithms.

    :param algorithms: the component algorithms to blend
    :param weights: dict mapping each component to its blend weight;
        the weights must sum to 1
    :raises ValueError: if the weights do not sum to 1
    """
    AlgoBase.__init__(self)
    # Use a small tolerance instead of exact float equality: weights
    # such as 0.1 + 0.2 + 0.7 may not sum to exactly 1.0 in binary
    # floating point, which made the original `!= 0` check reject
    # perfectly valid weight sets.
    if abs(sum(weights.values()) - 1) > 1e-9:
        # ValueError is more precise than Exception and is still caught
        # by any caller catching Exception.
        raise ValueError("Attention, sum of weights need to be 1")
    self.algorithms = algorithms
    self.weights = weights
def __init__(self, train):
    # Builds a neural collaborative-filtering model from a training
    # frame with 'userId', 'itemId' and 'rating' columns; training
    # settings come from the module-level `config` dict.
    # Always call base method before doing anything.
    AlgoBase.__init__(self)
    self.training_args = config['training_args']
    # instantiate the column index for both user and items
    self.indexer = ColumnIndexer(train, ['userId', 'itemId'])
    # index the train set
    self.train = self.indexer.transform(train)
    # get the number of distinct users and items
    self.number_of_users = len(set(train['userId'].values))
    self.number_of_items = len(set(train['itemId'].values))
    # create user item rating tuples
    # NOTE(review): the '_indexed' columns are read from `train`, not
    # from self.train — presumably ColumnIndexer.transform adds the
    # columns in place; confirm, otherwise this should read self.train.
    train_users_items_ratings = ((train['userId' + '_indexed'].values,
                                  train['itemId' + '_indexed'].values),
                                 train['rating'].values)
    # instantiate the tf datasets
    self.train_dataset = tf.data.Dataset.from_tensor_slices(
        train_users_items_ratings)
    # Build and compile the network; layer sizes come from config.
    self.ncf = NeuralCF(
        self.number_of_users, self.number_of_items,
        self.training_args.user_dim, self.training_args.item_dim,
        self.training_args.hidden1_dim, self.training_args.hidden2_dim,
        self.training_args.hidden3_dim, self.training_args.hidden4_dim)
    self.ncf.compile(
        optimizer=tf.keras.optimizers.Adam(learning_rate=0.001),
        loss=tf.keras.losses.MeanAbsoluteError())
def __init__(self, n_epochs=20, num_features=20, lambda_user=0.1,
             lambda_item=0.7, lambda_all=None, min_num_ratings=1,
             verbose=False, debug=False):
    """Matrix-factorization hyper-parameters.

    :param n_epochs: number of training epochs
    :param num_features: number of latent features
    :param lambda_user: user regularization (ignored when lambda_all is set)
    :param lambda_item: item regularization (ignored when lambda_all is set)
    :param lambda_all: if not None, overrides both lambda_user and lambda_item
    :param min_num_ratings: minimum number of ratings kept per user/item
    :param verbose: print progress information when True
    :param debug: enable debug behaviour
    """
    self.num_features = num_features
    # `is None` is the idiomatic (and safe) null test, unlike `== None`.
    self.lambda_user = lambda_user if lambda_all is None else lambda_all
    self.lambda_item = lambda_item if lambda_all is None else lambda_all
    self.min_num_ratings = min_num_ratings
    self.n_epochs = n_epochs
    # Fix: `verbose` was accepted but never stored in the original.
    self.verbose = verbose
    self.debug = debug
    AlgoBase.__init__(self)
def __init__(self, epochs=20, hiddenDim=100, learningRate=0.001,
             batchSize=100, sim_options=None):
    """Autoencoder-style recommender setup plus MovieLens data loading.

    :param epochs: number of training epochs
    :param hiddenDim: hidden-layer size
    :param learningRate: learning rate
    :param batchSize: mini-batch size
    :param sim_options: accepted for interface compatibility; unused
        here. Default changed from a shared mutable ``{}`` to ``None``
        (same observable behaviour, avoids the mutable-default pitfall).
    """
    AlgoBase.__init__(self)
    self.epochs = epochs
    self.hiddenDim = hiddenDim
    self.learningRate = learningRate
    self.batchSize = batchSize
    # Load the small MovieLens dataset eagerly at construction time.
    self.ml = MovieLens()
    self.ml.loadMovieLensLatestSmall()
    self.stoplist = ["sex", "drugs", "rock n roll"]
def __init__(self, components):
    """
    Constructor for WeightedHybrid
    :param components: The list of components to include in the hybrid
    """
    # (Removed the stale ``:param normalize:`` doc — no such parameter
    # exists on this constructor.)
    AlgoBase.__init__(self)
    # Set instance variables
    self._components = components
def __init__(self, epochs=10, hidden=100, lr=0.01, batch_size=100,
             params=None):
    """Training hyper-parameters.

    :param epochs: number of training epochs
    :param hidden: hidden-layer size
    :param lr: learning rate
    :param batch_size: mini-batch size
    :param params: extra options; accepted for interface compatibility
        but currently unused. Default changed from a shared mutable
        ``{}`` to ``None`` (same observable behaviour, avoids the
        mutable-default pitfall).
    """
    AlgoBase.__init__(self)
    self.epochs = epochs
    self.hidden = hidden
    self.lr = lr
    self.batch_size = batch_size
def __init__(self, sim_options, social_file, n_factor=100, n_epochs=10,
             lr=.005, reg=.02, alpha=0.5):
    """Social-aware matrix-factorization hyper-parameters.

    :param sim_options: similarity options forwarded to AlgoBase
    :param social_file: path to the social-relations file
    :param n_factor: number of latent factors
    :param n_epochs: number of training epochs
    :param lr: learning rate
    :param reg: regularization strength
    :param alpha: social-term weight
    """
    self.social_file = social_file
    self.alpha = alpha
    self.reg = reg
    self.lr = lr
    self.n_epochs = n_epochs
    self.n_factor = n_factor
    # The base class receives the similarity options.
    AlgoBase.__init__(self, sim_options=sim_options)
def __init__(self, epochs=20, hiddenDim=100, learningRate=0.001,
             batchSize=100, sim_options=None):
    """Store network training hyper-parameters.

    :param epochs: number of training epochs
    :param hiddenDim: hidden-layer size
    :param learningRate: learning rate
    :param batchSize: mini-batch size
    :param sim_options: accepted for interface compatibility; unused
        here. Default changed from a shared mutable ``{}`` to ``None``
        (same observable behaviour, avoids the mutable-default pitfall).
    """
    AlgoBase.__init__(self)
    self.epochs = epochs
    self.hiddenDim = hiddenDim
    self.learningRate = learningRate
    self.batchSize = batchSize
def __init__(self, epochs=20, hidden_dim=100, learning_rate=0.001,
             batch_size=100, sim_options=None):
    """Store network training hyper-parameters and similarity options.

    :param epochs: number of training epochs
    :param hidden_dim: hidden-layer size
    :param learning_rate: learning rate
    :param batch_size: mini-batch size
    :param sim_options: similarity options forwarded to AlgoBase;
        ``None`` means an empty dict — the default was changed from a
        shared mutable ``{}`` to avoid the mutable-default pitfall.
    """
    # Fall back to a fresh empty dict so AlgoBase sees the same value
    # as before.
    AlgoBase.__init__(
        self, sim_options=sim_options if sim_options is not None else {})
    self.epochs = epochs
    self.hidden_dim = hidden_dim
    self.learning_rate = learning_rate
    self.batch_size = batch_size
def __init__(self, prefered="item", wi=1.0, wu=1.0, wg=1.0):
    """Weighted blend of item / user / global terms.

    :param prefered: one of "item", "user", "global" or "mixed"; any
        other value silently falls back to "mixed"
    :param wi: weight of the item term
    :param wu: weight of the user term
    :param wg: weight of the global term
    """
    AlgoBase.__init__(self)
    # Unknown modes fall back to the mixed strategy.
    valid_modes = {"item", "user", "global", "mixed"}
    self.prefered = prefered if prefered in valid_modes else "mixed"
    self.wg = wg
    self.wu = wu
    self.wi = wi
def __init__(self, cat_products, cat_target):
    """
    For each user, this method chooses at random — weighted by the
    target frequency — which category to draw the results from.
    (Translated from the original French docstring.)
    """
    AlgoBase.__init__(self)
    # The model that gives us the \hat{r}_ij predictions.
    self.SVD = SVD()
    # Partnership information: product-to-category map and the target
    # proportion per category.
    self.cat_products = cat_products
    self.cat_target = cat_target
def __init__(self, cat_products, cat_target):
    """
    This method recommends items one by one, each time taking the item
    with the best similarity within the category that is, in proportion,
    furthest from its target value among the results obtained so far.
    (Translated from the original French docstring.)
    """
    AlgoBase.__init__(self)
    # The model that gives us the \hat{r}_ij predictions.
    self.SVD = SVD()
    # Partnership information: product-to-category map and the target
    # proportion per category.
    self.cat_products = cat_products
    self.cat_target = cat_target
def __init__(self, k=3, alpha=2, lambda1=10, step_size=0.0001,
             n_epoch=30, random_state=None):
    """Store gradient-descent hyper-parameters.

    :param k: number of latent factors
    :param alpha: alpha hyper-parameter
    :param lambda1: regularization strength
    :param step_size: gradient step size
    :param n_epoch: number of training epochs
    :param random_state: seed for reproducibility
    """
    AlgoBase.__init__(self)
    self.random_state = random_state
    self.n_epoch = n_epoch
    self.step_size = step_size
    self.lambda1 = lambda1
    self.alpha = alpha
    self.k = k
def __init__(self, n_components=20, max_iter=100, batch_size=1024,
             init_std=0.01, n_jobs=8, random_state=None,
             early_stopping=False, verbose=False):
    # Thin surprise wrapper around an ExpoMF estimator.
    # NOTE(review): batch_size, n_jobs, early_stopping and verbose are
    # accepted but neither stored nor forwarded to ExpoMF — confirm
    # whether ExpoMF should receive them.
    AlgoBase.__init__(self)
    self.clf = ExpoMF(n_components=n_components, max_iter=max_iter,
                      init_std=init_std, random_state=random_state)
def __init__(self, n_components=20, max_iter=100, init_std=0.01, R=0.1,
             alpha=5, random_state=None):
    """Thin surprise wrapper around a MaxNorm estimator.

    All parameters are forwarded unchanged to MaxNorm.
    """
    AlgoBase.__init__(self)
    # Collect the estimator options once, then build it.
    estimator_options = {
        'n_components': n_components,
        'max_iter': max_iter,
        'init_std': init_std,
        'R': R,
        'alpha': alpha,
        'random_state': random_state,
    }
    self.clf = MaxNorm(**estimator_options)
def __init__(self, max_rank, lmbda, max_iter=200, min_value=None,
             max_value=None, verbose=False):
    """Store the solver's hyper-parameters and value clipping bounds.

    :param max_rank: maximum rank used by the solver
    :param lmbda: regularization strength
    :param max_iter: maximum number of iterations
    :param min_value: lower clipping bound, or None
    :param max_value: upper clipping bound, or None
    :param verbose: print progress information when True
    """
    AlgoBase.__init__(self)
    self.max_value = max_value
    self.min_value = min_value
    self.verbose = verbose
    self.max_iter = max_iter
    self.lmbda = lmbda
    self.max_rank = max_rank
def __init__(self, k=40, sim_options=None, verbose=False):
    """
    Init a ContentKNNAlgorithm class to calculate item based
    recommendations.

    Parameters
    ----------
    k: int
        Number of nearest neighbors
    sim_options: dict, optional
        A dict object contains similarity options for AlgoBase.
        ``None`` (the new default) means an empty dict — changed from a
        shared mutable ``{}`` to avoid the mutable-default pitfall.
    verbose: bool
        Verbosity of fit method.
    """
    # Fall back to a fresh empty dict so AlgoBase sees the same value
    # as before.
    AlgoBase.__init__(
        self, sim_options=sim_options if sim_options is not None else {})
    self.k = k
    self.verbose = verbose
def __init__(self, algorithms, weights):
    """
    constructor: initialize our algorithms and weights

    Args:
        param1:algorithms: the algorithms used for hybrid
        param2:weights: the weight added to the hybrid algorithm;
            values must sum to 1

    Raises:
        ValueError: if the weights do not sum to 1
    """
    AlgoBase.__init__(self)
    # Use a small tolerance instead of exact float equality: weights
    # such as 0.1 + 0.2 + 0.7 may not sum to exactly 1.0 in binary
    # floating point, which made the original `!= 0` check reject
    # perfectly valid weight sets.
    if abs(sum(weights.values()) - 1) > 1e-9:
        # ValueError is more precise than Exception and is still caught
        # by any caller catching Exception.
        raise ValueError("Attention, sum of weights need to be 1")
    self.algorithms = algorithms
    self.weights = weights
def __init__(self, n_epochs=20, biased=True, lr_all=.005, reg_all=.02,
             lr_bu=None, lr_bi=None, reg_bu=None, reg_bi=None,
             verbose=True):
    """Baseline-estimate hyper-parameters.

    Per-parameter learning rates and regularization terms fall back to
    the corresponding ``lr_all`` / ``reg_all`` value when left as None.
    """
    def _fallback(specific, general):
        # A specific setting wins; otherwise use the global one.
        return general if specific is None else specific

    self.n_epochs = n_epochs
    self.biased = biased
    self.lr_bu = _fallback(lr_bu, lr_all)
    self.lr_bi = _fallback(lr_bi, lr_all)
    self.reg_bu = _fallback(reg_bu, reg_all)
    self.reg_bi = _fallback(reg_bi, reg_all)
    self.verbose = verbose
    AlgoBase.__init__(self)
def __init__(self, max_rank, lmbda, propensity_scores='1bitmc',
             max_iter=200, one_bit_mc_max_rank=None, one_bit_mc_tau=1.,
             one_bit_mc_gamma=1., verbose=False):
    """Solver hyper-parameters plus 1-bit-MC propensity settings.

    :param max_rank: maximum rank used by the solver
    :param lmbda: regularization strength
    :param propensity_scores: propensity estimation method
    :param max_iter: maximum number of iterations
    :param one_bit_mc_max_rank: max rank for the 1-bit-MC step, or None
    :param one_bit_mc_tau: tau parameter of the 1-bit-MC step
    :param one_bit_mc_gamma: gamma parameter of the 1-bit-MC step
    :param verbose: print progress information when True
    """
    AlgoBase.__init__(self)
    # Core solver settings.
    self.max_rank = max_rank
    self.lmbda = lmbda
    self.max_iter = max_iter
    self.verbose = verbose
    # Propensity-estimation settings.
    self.propensity_scores = propensity_scores
    self.one_bit_mc_max_rank = one_bit_mc_max_rank
    self.one_bit_mc_tau = one_bit_mc_tau
    self.one_bit_mc_gamma = one_bit_mc_gamma
def __init__(self, user_dataset, item_dataset):
    """Load movie titles and per-user ratings from MovieLens-style files.

    :param user_dataset: path to a tab-separated file with lines
        ``user<TAB>movieid<TAB>rating<TAB>timestamp``
    :param item_dataset: path to a pipe-separated file whose first two
        fields per line are ``movieid|title``
    """
    # Always call base method before doing anything.
    AlgoBase.__init__(self)
    self.Rmin = 1
    self.Rmax = 5
    self.Rmid = (self.Rmin + self.Rmax) / 2.0
    # Movie id -> title. Use context managers so the file handles are
    # always closed (the original opened both files and never closed
    # them). Also avoid shadowing the builtin `id`.
    self.movies = {}
    with open(item_dataset) as item_file:
        for line in item_file:
            movie_id, title = line.split('|')[0:2]
            self.movies[movie_id] = title
    # Per-user {title: rating} map.
    self.userData = {}
    with open(user_dataset) as user_file:
        for line in user_file:
            (user, movieid, rating, ts) = line.split('\t')
            self.userData.setdefault(user, {})
            self.userData[user][self.movies[movieid]] = float(rating)
    # Creating User list (all user ids, in insertion order).
    self.UsersList = [i for i in self.userData]
def __init__(
        self, l1_ratio=0.1, eps=1e-3, n_alphas=10, alphas=None,
        positive=True, max_iter=100):
    """ElasticNet-path hyper-parameters (SLIM-style item model).

    :param l1_ratio: ElasticNet mixing parameter
    :param eps: length of the regularization path
    :param n_alphas: number of alphas along the path
    :param alphas: explicit alpha values, or None to compute them
    :param positive: restrict coefficients to be positive
    :param max_iter: maximum number of iterations
    """
    # Always call base method before doing anything.
    AlgoBase.__init__(self)
    # Model state populated during fit().
    self.W = None
    self.A = None
    self.trainset = None  # fix: was assigned twice in the original
    self.the_mean = 0
    # Hyper-parameters.
    self.l1_ratio = l1_ratio
    self.eps = eps
    self.n_alphas = n_alphas
    self.max_iter = max_iter
    self.alphas = alphas
    self.positive = positive
def __init__(self, k=3, alpha=2, lambda1=10, step_size=0.0001,
             n_epoch=30, propensity_scores='1bitmc',
             one_bit_mc_max_rank=None, one_bit_mc_tau=1.,
             one_bit_mc_gamma=1., verbose=False, random_state=None):
    """Gradient-descent hyper-parameters plus 1-bit-MC propensity
    settings.

    :param k: number of latent factors
    :param alpha: alpha hyper-parameter
    :param lambda1: regularization strength
    :param step_size: gradient step size
    :param n_epoch: number of training epochs
    :param propensity_scores: propensity estimation method
    :param one_bit_mc_max_rank: max rank for the 1-bit-MC step, or None
    :param one_bit_mc_tau: tau parameter of the 1-bit-MC step
    :param one_bit_mc_gamma: gamma parameter of the 1-bit-MC step
    :param verbose: print progress information when True
    :param random_state: seed for reproducibility
    """
    AlgoBase.__init__(self)
    # Core optimizer settings.
    self.k = k
    self.alpha = alpha
    self.lambda1 = lambda1
    self.step_size = step_size
    self.n_epoch = n_epoch
    self.random_state = random_state
    self.verbose = verbose
    # Propensity-estimation settings.
    self.propensity_scores = propensity_scores
    self.one_bit_mc_max_rank = one_bit_mc_max_rank
    self.one_bit_mc_tau = one_bit_mc_tau
    self.one_bit_mc_gamma = one_bit_mc_gamma
def __init__(self, k=40, min_k=1, alog=KNNWithMeans, user_based=True,
             beta=2.5, epsilon=0.9, lambdak=0.9, sim_options=None,
             verbose=True, **kwargs):
    """Wrap a neighborhood algorithm (default KNNWithMeans).

    :param k: number of neighbors (also forwarded to the wrapped algo)
    :param min_k: minimum number of neighbors
    :param alog: the wrapped algorithm class. NOTE: the name looks like
        a typo for "algo", but it is kept so keyword callers keep
        working.
    :param user_based: True -> user propensity, False -> item propensity
    :param beta: beta hyper-parameter
    :param epsilon: epsilon hyper-parameter
    :param lambdak: lambda hyper-parameter
    :param sim_options: similarity options forwarded to the wrapped
        algorithm; ``None`` means an empty dict — the default was
        changed from a shared mutable ``{}`` to avoid the
        mutable-default pitfall.
    :param verbose: forwarded to the wrapped algorithm
    :param kwargs: accepted for interface compatibility; unused here.
    """
    # Always call base method before doing anything.
    AlgoBase.__init__(self)
    if sim_options is None:
        sim_options = {}
    self.k = k
    self.min_k = min_k
    self.algo = alog(k=k, sim_options=sim_options, verbose=verbose)
    self.epsilon = epsilon
    self.lambdak = lambdak
    self.beta = beta
    # Propensity type: per-user or per-item.
    self.ptype = 'user' if user_based else 'item'
def __init__(self, n_factors=100, n_epochs=20, biased=True, init_mean=0,
             init_std_dev=.1, lr_all=.025, reg_all=.02, lr_bu=None,
             lr_bi=None, lr_pu=None, lr_qi=None, reg_bu=None,
             reg_bi=None, reg_pu=None, reg_qi=None, random_state=None,
             verbose=False):
    """SVD-style hyper-parameters.

    Per-parameter learning rates and regularization terms fall back to
    the corresponding ``lr_all`` / ``reg_all`` value when left as None.
    """
    def _resolve(specific, general):
        # A specific setting wins; otherwise use the global one.
        return specific if specific is not None else general

    self.n_factors = n_factors
    self.n_epochs = n_epochs
    self.biased = biased
    self.init_mean = init_mean
    self.init_std_dev = init_std_dev
    self.lr_bu = _resolve(lr_bu, lr_all)
    self.lr_bi = _resolve(lr_bi, lr_all)
    self.lr_pu = _resolve(lr_pu, lr_all)
    self.lr_qi = _resolve(lr_qi, lr_all)
    self.reg_bu = _resolve(reg_bu, reg_all)
    self.reg_bi = _resolve(reg_bi, reg_all)
    self.reg_pu = _resolve(reg_pu, reg_all)
    self.reg_qi = _resolve(reg_qi, reg_all)
    self.random_state = random_state
    self.verbose = verbose
    AlgoBase.__init__(self)
def __init__(self, n_components=20, max_iter=100,
             propensity_scores='1bitmc', one_bit_mc_max_rank=None,
             one_bit_mc_tau=1., one_bit_mc_gamma=1., init_std=0.01,
             R=0.1, alpha=5, verbose=False, random_state=None):
    """Surprise wrapper around WeightedMaxNorm with 1-bit-MC propensity
    settings.
    """
    AlgoBase.__init__(self)
    # Propensity-estimation settings.
    self.propensity_scores = propensity_scores
    self.one_bit_mc_max_rank = one_bit_mc_max_rank
    self.one_bit_mc_tau = one_bit_mc_tau
    self.one_bit_mc_gamma = one_bit_mc_gamma
    self.verbose = verbose
    # Collect the estimator options once, then build it.
    estimator_options = {
        'n_components': n_components,
        'max_iter': max_iter,
        'init_std': init_std,
        'R': R,
        'alpha': alpha,
        'random_state': random_state,
    }
    self.clf = WeightedMaxNorm(**estimator_options)
def __init__(self, oas, k=10, sim_options=None):
    """Store the recommender's inputs.

    :param oas: object stored for later use by the algorithm
    :param k: number of neighbors / results to consider
    :param sim_options: accepted for interface compatibility; unused
        here. Default changed from a shared mutable ``{}`` to ``None``
        (same observable behaviour, avoids the mutable-default pitfall).
    """
    AlgoBase.__init__(self)
    self.k = k
    self.oas = oas
def __init__(self, sim_options=None, bsl_options=None):
    """Forward similarity and baseline options to AlgoBase.

    :param sim_options: similarity options; ``None`` means an empty dict
    :param bsl_options: baseline options; ``None`` means an empty dict

    The defaults were changed from shared mutable ``{}`` objects to
    ``None`` (converted to fresh empty dicts below), avoiding the
    mutable-default pitfall with identical observable behaviour.
    """
    AlgoBase.__init__(
        self,
        sim_options=sim_options if sim_options is not None else {},
        bsl_options=bsl_options if bsl_options is not None else {})