def get_raw_data(dataset_name):
    """Load raw data for a named dataset via the ``dataloader`` module.

    Parameters
    ----------
    dataset_name : str
        One of ``"amazon_book"``, ``"citeulike"`` or ``"tradesy"``.

    Returns
    -------
    Whatever the matching ``dataloader.load_*`` function returns
    (presumably a dict of splits -- confirm against ``dataloader``).

    Raises
    ------
    ValueError
        For an unrecognized ``dataset_name``.  (The original printed
        "Bad dataset name." and called ``exit()``, which kills the whole
        process and cannot be caught by callers; raising is the
        conventional, recoverable failure mode.)
    """
    # Map dataset names to loader *attribute names*.  The attribute is
    # resolved lazily with getattr so that an unknown dataset name fails
    # before `dataloader` is ever touched.
    loaders = {
        "amazon_book": "load_amazon_book",
        "citeulike": "load_citeulike",
        "tradesy": "load_tradesy",
    }
    if dataset_name not in loaders:
        raise ValueError("Bad dataset name: %r" % (dataset_name,))
    return getattr(dataloader, loaders[dataset_name])()
from openrec import ModelTrainer
from openrec.utils import Dataset
from BPR import BPR
from openrec.utils.evaluators import AUC
from openrec.utils.samplers import RandomPairwiseSampler
from openrec.utils.samplers import EvaluationSampler
import dataloader

# Training script: BPR recommender on the citeulike dataset
# (openrec v1 graph-style API).

raw_data = dataloader.load_citeulike()

# Hyperparameters.
dim_embed = 100        # user/item embedding dimensionality
total_iter = 10000     # total training iterations
batch_size = 1000
eval_iter = 10000      # evaluate once, at the end of training
save_iter = eval_iter  # save whenever we evaluate

# Wrap the raw splits.  Val/test use 500 sampled negatives per positive
# for ranking evaluation.
train_dataset = Dataset(raw_data['train_data'], raw_data['total_users'],
                        raw_data['total_items'], name='Train')
val_dataset = Dataset(raw_data['val_data'], raw_data['total_users'],
                      raw_data['total_items'], name='Val', num_negatives=500)
test_dataset = Dataset(raw_data['test_data'], raw_data['total_users'],
                       raw_data['total_items'], name='Test', num_negatives=500)

# Samplers: random (user, pos, neg) pairs for BPR training; sequential
# full-pass samplers for evaluation.
train_sampler = RandomPairwiseSampler(batch_size=batch_size,
                                      dataset=train_dataset,
                                      num_process=5)
val_sampler = EvaluationSampler(batch_size=batch_size, dataset=val_dataset)
test_sampler = EvaluationSampler(batch_size=batch_size, dataset=test_dataset)

bpr_model = BPR(batch_size=batch_size,
                total_users=train_dataset.total_users(),
                total_items=train_dataset.total_items(),
                l2_reg=0.01,
                dim_user_embed=dim_embed,
                dim_item_embed=dim_embed,
                save_model_dir='bpr_recommender/',
                train=True,
                serve=True)

model_trainer = ModelTrainer(model=bpr_model)
auc_evaluator = AUC()

# FIX: the original source was truncated mid-call (it ended at
# `train_sampler=train_sampler,` with an unclosed parenthesis, a
# SyntaxError).  Completed with the evaluation samplers and evaluator
# following the standard openrec ModelTrainer.train signature --
# TODO(review): confirm against the original script.
model_trainer.train(total_iter=total_iter,
                    eval_iter=eval_iter,
                    save_iter=save_iter,
                    train_sampler=train_sampler,
                    eval_samplers=[val_sampler, test_sampler],
                    evaluators=[auc_evaluator])
# TF2 (eager-mode) variant of the BPR training setup, using the
# openrec.tf2 API.  NOTE(review): the training loop appears to continue
# beyond this chunk; the top-level names below are its interface.
from openrec.tf2.data import Dataset
from openrec.tf2.recommenders import BPR
from openrec.tf2.metrics import AUC, NDCG, Recall, DictMean
from tqdm.auto import tqdm
import tensorflow as tf
import numpy as np
from tensorflow.keras import optimizers
import dataloader

# Load the citeulike splits; assumes the data lives under '../dataset/'
# -- TODO confirm against dataloader.load_citeulike.
raw_data = dataloader.load_citeulike('../dataset/')

# Hyperparameters.
dim_embed = 50                   # user/item embedding dimensionality
total_iter = int(1e5)            # total training iterations
batch_size = 1000
eval_interval = 1000             # evaluate every `eval_interval` iterations
save_interval = eval_interval    # save whenever we evaluate

# Wrap the raw train/validation splits in openrec.tf2 Dataset objects.
train_dataset = Dataset(raw_data=raw_data['train_data'],
                        total_users=raw_data['total_users'],
                        total_items=raw_data['total_items'])
val_dataset = Dataset(raw_data=raw_data['val_data'],
                      total_users=raw_data['total_users'],
                      total_items=raw_data['total_items'])

# BPR recommender with equal-sized user and item embeddings.
bpr_model = BPR(total_users=raw_data['total_users'],
                total_items=raw_data['total_items'],
                dim_user_embed=dim_embed,
                dim_item_embed=dim_embed)

# Adam optimizer with Keras defaults.
optimizer = optimizers.Adam()