def get_feed_dict(self, corpus, data, batch_start, batch_size, phase):
    """Extend the base BPR feed dict with a per-interaction time-bin id.

    Returns the dict produced by ``BPR.get_feed_dict`` plus ``'time_id'``:
    a LongTensor of bin indices in ``[0, self.time_bin - 1]``.
    """
    feed_dict = BPR.get_feed_dict(self, corpus, data, batch_start, batch_size, phase)
    real_batch_size = feed_dict['batch_size']
    # presumably data['time'] holds raw interaction timestamps — verify against reader
    times = data['time'][batch_start:batch_start + real_batch_size].values
    # Discretize into equal-width bins; clip so timestamps outside the
    # range seen at construction time (e.g. later test interactions, which
    # would yield an index >= time_bin, or earlier ones yielding a negative
    # index) cannot crash the embedding lookup.
    time_ids = (times - self.min_time) // self.time_bin_width
    time_ids = time_ids.clip(0, self.time_bin - 1)
    feed_dict['time_id'] = utils.numpy_to_torch(time_ids).long()
    return feed_dict
def parse_model_args(parser):
    """Register this model's extra hyper-parameter, then defer to BPR."""
    parser.add_argument('--layers', type=str, default='[64]',
                        help="Size of each layer.")
    return BPR.parse_model_args(parser)
def parse_model_args(parser):
    """Register the time-bin hyper-parameter, then defer to BPR."""
    parser.add_argument('--time_bin', type=int, default=100,
                        help='Number of time bins.')
    return BPR.parse_model_args(parser)
def _define_params(self):
    """Define the base BPR parameters plus time-dependent embedding tables."""
    BPR._define_params(self)
    # One embedding row per time bin, for users and items respectively;
    # register both names so the framework tracks them as embeddings.
    for attr in ('u_t_embeddings', 'i_t_embeddings'):
        setattr(self, attr, torch.nn.Embedding(self.time_bin, self.emb_size))
        self.embeddings.append(attr)
def __init__(self, args, corpus):
    """Cache time-binning statistics, then build the base BPR model."""
    self.time_bin = args.time_bin
    self.min_time = corpus.min_time
    # Equal-width bins covering [min_time, max_time]; the +1. keeps the
    # largest timestamp strictly inside the last bin.
    span = corpus.max_time - self.min_time + 1.
    self.time_bin_width = span / self.time_bin
    BPR.__init__(self, args, corpus)
# -*- coding: utf-8 -*-
"""Entry script: train and evaluate a BPR model on the NAIS data loader."""
from models.BPR import BPR
from readers.naisdataloader import Dataloader
from configs.config import Config


def main():
    """Build the config, data loader and model, then run train/evaluate."""
    config = Config()
    dl = Dataloader(config)
    bpr = BPR(config, dl)
    bpr.train_and_evaluate()


# Guard the entry point so importing this module has no side effects.
if __name__ == '__main__':
    main()
def __init__(self, args, corpus):
    """Parse layer sizes and dropout from args, then build the base BPR model."""
    import ast
    # literal_eval only accepts Python literals (e.g. '[64, 32]'), unlike
    # eval, which would execute arbitrary code passed on the command line.
    self.layers = ast.literal_eval(args.layers)
    self.dropout = args.dropout
    BPR.__init__(self, args, corpus)