def train_model(self):
    """Run the full training loop: sample negatives, iterate mini-batches, evaluate each epoch."""
    self.logger.info(self.evaluator.metrics_info())
    # Per-user chronologically ordered training interactions.
    self.user_pos_train = csr_to_user_dict_bytime(
        self.dataset.time_matrix, self.dataset.train_matrix)
    users_list, item_seq_list, item_pos_list = self._generate_sequences()

    for epoch in range(self.epochs):
        # Fresh negative samples every epoch.
        item_neg_list = self._sample_negative(users_list)
        batches = DataIterator(users_list,
                               item_seq_list,
                               item_pos_list,
                               item_neg_list,
                               batch_size=self.batch_size,
                               shuffle=True)
        for bat_user, bat_item_seq, bat_item_pos, bat_item_neg in batches:
            feed_dict = {self.user_ph: bat_user,
                         self.item_seq_ph: bat_item_seq,
                         self.item_pos_ph: bat_item_pos,
                         self.item_neg_ph: bat_item_neg,
                         self.is_training: True}
            self.sess.run(self.train_opt, feed_dict=feed_dict)

        result = self.evaluate_model()
        self.logger.info("epoch %d:\t%s" % (epoch, result))
def __init__(self, sess, dataset, conf):
    """Build the Fossil model configuration.

    Args:
        sess: TensorFlow session used for training/inference.
        dataset: dataset object exposing `num_users`, `num_items`,
            `train_matrix` and `time_matrix`.
        conf: dict of hyper-parameters (regs, alpha, learner, etc.).
    """
    super(Fossil, self).__init__(dataset, conf)
    self.verbose = conf["verbose"]
    self.batch_size = conf["batch_size"]
    self.num_epochs = conf["epochs"]
    self.embedding_size = conf["embedding_size"]
    # regs = [item-embedding reg, bias reg, eta reg]
    regs = conf["regs"]
    self.lambda_bilinear = regs[0]
    self.gamma_bilinear = regs[1]
    self.reg_eta = regs[2]
    self.alpha = conf["alpha"]
    # Fix: `num_negatives` was assigned three times in the original;
    # a single assignment is sufficient.
    self.num_negatives = conf["num_neg"]
    self.learning_rate = conf["learning_rate"]
    self.learner = conf["learner"]
    self.loss_function = conf["loss_function"]
    self.is_pairwise = conf["is_pairwise"]
    self.high_order = conf["high_order"]
    self.init_method = conf["init_method"]
    self.stddev = conf["stddev"]
    self.num_users = dataset.num_users
    self.num_items = dataset.num_items
    self.dataset = dataset
    self.train_matrix = self.dataset.train_matrix
    # Time-ordered per-user interaction dict for sequence construction.
    self.train_dict = csr_to_user_dict_bytime(self.dataset.time_matrix,
                                              self.train_matrix)
    self.sess = sess
def __init__(self, sess, dataset, conf):
    """Read FPMCplus hyper-parameters from `conf` and cache dataset handles."""
    super(FPMCplus, self).__init__(dataset, conf)
    # Optimization settings.
    self.learning_rate = conf["learning_rate"]
    self.learner = conf["learner"]
    self.loss_function = conf["loss_function"]
    self.is_pairwise = conf["is_pairwise"]
    self.num_epochs = conf["epochs"]
    self.batch_size = conf["batch_size"]
    # Model-size / regularization settings.
    self.embedding_size = conf["embedding_size"]
    self.weight_size = conf["weight_size"]
    self.reg_mf = conf["reg_mf"]
    self.reg_w = conf["reg_w"]
    self.high_order = conf["high_order"]
    self.verbose = conf["verbose"]
    # Initialization settings.
    self.embed_init_method = conf["embed_init_method"]
    self.weight_init_method = conf["weight_init_method"]
    self.stddev = float(conf["stddev"])
    self.num_negatives = conf["num_neg"]
    # Dataset handles.
    self.num_users = dataset.num_users
    self.num_items = dataset.num_items
    self.dataset = dataset
    self.train_matrix = dataset.train_matrix
    self.train_dict = csr_to_user_dict_bytime(dataset.time_matrix,
                                              dataset.train_matrix)
    self.sess = sess
def get_user_train_dict(self, by_time=False):
    """Return per-user training interactions as a dict.

    Args:
        by_time: when True, items are ordered by interaction timestamp
            (uses the time matrix); otherwise order is unspecified.
    """
    if by_time:
        return csr_to_user_dict_bytime(self.time_matrix, self.train_matrix)
    return csr_to_user_dict(self.train_matrix)
def __init__(self, dataset, conf):
    """Build the SASRec (self-attentive sequential recommendation) modules.

    Args:
        dataset: dataset object exposing `train_matrix` and `time_matrix`.
        conf: dict of hyper-parameters (lr, hidden_units, num_blocks, ...).
    """
    super(SASRec, self).__init__(dataset, conf)
    train_matrix, time_matrix = dataset.train_matrix, dataset.time_matrix
    self.dataset = dataset
    self.users_num, self.items_num = train_matrix.shape
    self.lr = conf["lr"]
    self.l2_emb = conf["l2_emb"]
    self.hidden_units = conf["hidden_units"]
    self.batch_size = conf["batch_size"]
    self.epochs = conf["epochs"]
    self.dropout_rate = conf["dropout_rate"]
    self.max_len = conf["max_len"]
    self.num_blocks = conf["num_blocks"]
    self.num_heads = conf["num_heads"]
    # Fix: the original hard-coded torch.device("cuda"), which crashes on
    # CPU-only hosts; fall back to CPU when CUDA is unavailable.
    self.dev = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    # Per-user chronologically ordered training interactions.
    self.user_pos_train = csr_to_user_dict_bytime(time_matrix, train_matrix)

    # Item embedding reserves index 0 for padding.
    self.item_emb = torch.nn.Embedding(self.items_num + 1,
                                       self.hidden_units,
                                       padding_idx=0)
    # Learned absolute position embedding (TO IMPROVE per original note).
    self.pos_emb = torch.nn.Embedding(self.max_len, self.hidden_units)
    self.emb_dropout = torch.nn.Dropout(p=self.dropout_rate)

    # One (layernorm, attention, layernorm, feed-forward) stack per block.
    self.attention_layernorms = torch.nn.ModuleList()  # to be Q for self-attention
    self.attention_layers = torch.nn.ModuleList()
    self.forward_layernorms = torch.nn.ModuleList()
    self.forward_layers = torch.nn.ModuleList()
    self.last_layernorm = torch.nn.LayerNorm(self.hidden_units, eps=1e-8)

    for _ in range(self.num_blocks):
        new_attn_layernorm = torch.nn.LayerNorm(self.hidden_units, eps=1e-8)
        self.attention_layernorms.append(new_attn_layernorm)

        new_attn_layer = torch.nn.MultiheadAttention(self.hidden_units,
                                                     self.num_heads,
                                                     self.dropout_rate)
        self.attention_layers.append(new_attn_layer)

        new_fwd_layernorm = torch.nn.LayerNorm(self.hidden_units, eps=1e-8)
        self.forward_layernorms.append(new_fwd_layernorm)

        new_fwd_layer = PointWiseFeedForward(self.hidden_units,
                                             self.dropout_rate)
        self.forward_layers.append(new_fwd_layer)
def __init__(self, sess, dataset, conf):
    """Read NPE hyper-parameters from `conf` and cache dataset handles."""
    super(NPE, self).__init__(dataset, conf)
    # Optimization settings.
    self.learning_rate = conf["learning_rate"]
    self.learner = conf["learner"]
    self.loss_function = conf["loss_function"]
    self.num_epochs = conf["epochs"]
    self.batch_size = conf["batch_size"]
    self.num_negatives = conf["num_neg"]
    # Model-size / regularization settings.
    self.embedding_size = conf["embedding_size"]
    self.reg = conf["reg"]
    self.high_order = conf["high_order"]
    self.verbose = conf["verbose"]
    # Initialization settings.
    self.init_method = conf["init_method"]
    self.stddev = conf["stddev"]
    # Dataset handles.
    self.num_users = dataset.num_users
    self.num_items = dataset.num_items
    self.dataset = dataset
    self.train_dict = csr_to_user_dict_bytime(dataset.time_matrix,
                                              dataset.train_matrix)
    self.sess = sess
def __init__(self, sess, dataset, conf):
    """Read SASRec hyper-parameters from `conf` and cache dataset handles."""
    super(SASRec, self).__init__(dataset, conf)
    train_matrix, time_matrix = dataset.train_matrix, dataset.time_matrix
    self.dataset = dataset
    self.users_num, self.items_num = train_matrix.shape
    # Optimization settings.
    self.lr = conf["lr"]
    self.l2_reg = conf["l2_reg"]
    self.batch_size = conf["batch_size"]
    self.epochs = conf["epochs"]
    # Architecture settings.
    self.embedding_size = conf["embedding_size"]
    self.dropout_rate = conf["dropout_rate"]
    self.max_len = conf["max_len"]
    self.num_blocks = conf["num_blocks"]
    self.num_heads = conf["num_heads"]
    # Per-user chronologically ordered training interactions.
    self.user_pos_train = csr_to_user_dict_bytime(time_matrix, train_matrix)
    self.sess = sess
def __init__(self, sess, dataset, conf):
    """Read FPMC hyper-parameters from `conf` and cache dataset handles."""
    super(FPMC, self).__init__(dataset, conf)
    # Optimization settings.
    self.lr = conf["lr"]
    self.learner = conf["learner"]
    self.loss_function = conf["loss_function"]
    self.is_pairwise = conf["is_pairwise"]
    self.num_epochs = conf["epochs"]
    self.batch_size = conf["batch_size"]
    self.num_negatives = conf["num_neg"]
    # Model-size / evaluation settings.
    self.embedding_size = conf["embedding_size"]
    self.topK = conf["topk"]
    self.reg_mf = conf["reg_mf"]
    self.verbose = conf["verbose"]
    # Initialization settings.
    self.init_method = conf["init_method"]
    self.stddev = conf["stddev"]
    # Dataset handles.
    self.num_users = dataset.num_users
    self.num_items = dataset.num_items
    self.dataset = dataset
    self.train_matrix = dataset.train_matrix
    self.train_dict = csr_to_user_dict_bytime(dataset.time_matrix,
                                              dataset.train_matrix)
    self.sess = sess