def test_movie_lens_fit(self):
    """
    This test checks whether the movielens getter works and that the resulting data is viable for
    fitting/testing a TensorRec model.
    """
    train_interactions, test_interactions, user_features, item_features, _ = get_movielens_100k()

    model = TensorRec()
    model.fit(interactions=train_interactions, user_features=user_features, item_features=item_features)
    predictions = model.predict(user_features=user_features, item_features=item_features)

    self.assertIsNotNone(predictions)
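# A hedged sketch (not part of the original test suite): an additional check one could add, assuming
# model.predict() returns a dense score matrix with one row per user and one column per item.
def test_movie_lens_prediction_shape(self):
    train_interactions, test_interactions, user_features, item_features, _ = get_movielens_100k()

    model = TensorRec()
    model.fit(interactions=train_interactions, user_features=user_features, item_features=item_features)
    predictions = model.predict(user_features=user_features, item_features=item_features)

    # One predicted score per (user, item) pair
    self.assertEqual(predictions.shape, (user_features.shape[0], item_features.shape[0]))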
from tensorrec import TensorRec
from tensorrec.eval import fit_and_eval
from tensorrec.representation_graphs import (
    LinearRepresentationGraph, NormalizedLinearRepresentationGraph
)
from tensorrec.loss_graphs import BalancedWMRBLossGraph

from test.datasets import get_movielens_100k

import logging
logging.getLogger().setLevel(logging.INFO)

# Load the movielens dataset
train_interactions, test_interactions, user_features, item_features, _ = get_movielens_100k(negative_value=0)

# Construct parameters for fitting
epochs = 500
alpha = 0.00001
n_components = 10
verbose = True
learning_rate = .01
n_sampled_items = int(item_features.shape[0] * .1)
fit_kwargs = {'epochs': epochs, 'alpha': alpha, 'verbose': verbose, 'learning_rate': learning_rate,
              'n_sampled_items': n_sampled_items}

# Build two models -- one without an attention graph, one with a linear attention graph
model_without_attention = TensorRec(
    n_components=10,
    n_tastes=3,
    user_repr_graph=NormalizedLinearRepresentationGraph(),
    attention_graph=None,
)
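# A hedged sketch (not the original script's continuation): the second model differs only in its
# attention graph, assuming TensorRec accepts a representation graph instance for attention_graph,
# as the attention_graph=None argument above suggests.
model_with_attention = TensorRec(
    n_components=10,
    n_tastes=3,
    user_repr_graph=NormalizedLinearRepresentationGraph(),
    attention_graph=LinearRepresentationGraph(),  # attend over the 3 tastes with a linear graph
)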
import numpy as np

from tensorrec import TensorRec
from tensorrec.eval import precision_at_k, recall_at_k
from tensorrec.loss_graphs import BalancedWMRBLossGraph
from tensorrec.prediction_graphs import DotProductPredictionGraph
from tensorrec.representation_graphs import NormalizedLinearRepresentationGraph

from test.datasets import get_movielens_100k

import logging
logging.getLogger().setLevel(logging.INFO)

# Load the movielens dataset
train_interactions, test_interactions, user_features, item_features, item_titles = \
    get_movielens_100k(negative_value=-1.0)

# Assemble parameters for fitting. 'epochs' is 1 in the fit_kwargs because we will be calling fit_partial 1000 times
# to run 1000 epochs.
epochs = 1000
fit_kwargs = {'epochs': 1, 'alpha': 0.0001, 'verbose': True, 'learning_rate': .01,
              'n_sampled_items': int(item_features.shape[0] * .1)}

# Build the TensorRec model
model = TensorRec(n_components=2,
                  biased=True,
                  loss_graph=BalancedWMRBLossGraph(),
                  prediction_graph=DotProductPredictionGraph(),
                  user_repr_graph=NormalizedLinearRepresentationGraph(),
                  normalize_users=True,
                  normalize_items=True)
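# A hedged sketch (not the original script's continuation) of the fit_partial loop described in the
# comment above, assuming precision_at_k/recall_at_k take (model, test_interactions, k=...,
# user_features=..., item_features=...) and return per-user scores.
for epoch in range(epochs):
    model.fit_partial(interactions=train_interactions,
                      user_features=user_features,
                      item_features=item_features,
                      **fit_kwargs)

    # Log ranking quality on the held-out interactions after each epoch
    p_at_k = precision_at_k(model, test_interactions, k=5,
                            user_features=user_features, item_features=item_features)
    r_at_k = recall_at_k(model, test_interactions, k=30,
                         user_features=user_features, item_features=item_features)
    logging.info('Epoch {}: precision@5 {:.4f}, recall@30 {:.4f}'.format(epoch, np.mean(p_at_k), np.mean(r_at_k)))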
from tensorrec.representation_graphs import (LinearRepresentationGraph, ReLURepresentationGraph,
                                             NormalizedLinearRepresentationGraph)
from tensorrec.loss_graphs import WMRBLossGraph, BalancedWMRBLossGraph
from tensorrec.prediction_graphs import (DotProductPredictionGraph, CosineSimilarityPredictionGraph,
                                         EuclidianSimilarityPredictionGraph)
from tensorrec.util import append_to_string_at_point

from test.datasets import get_movielens_100k

import logging
logging.getLogger().setLevel(logging.INFO)

# Load the movielens dataset
train_interactions, test_interactions, user_features, item_features, _ = get_movielens_100k(negative_value=0)

# Construct parameters for fitting
epochs = 300
alpha = 0.00001
n_components = 10
verbose = True
learning_rate = .01
n_sampled_items = int(item_features.shape[0] * .1)
biased = False

fit_kwargs = {
    'epochs': epochs,
    'alpha': alpha,
    'verbose': verbose,
    'learning_rate': learning_rate,
    'n_sampled_items': n_sampled_items
}
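# A hedged sketch (not the original script's continuation): one way to build a grid of models for
# comparing the imported loss and prediction graphs. TensorRec is imported here because the excerpt
# above does not include that import.
from tensorrec import TensorRec

loss_graphs = [WMRBLossGraph, BalancedWMRBLossGraph]
prediction_graphs = [DotProductPredictionGraph, CosineSimilarityPredictionGraph, EuclidianSimilarityPredictionGraph]

models_to_compare = {}
for loss_graph in loss_graphs:
    for prediction_graph in prediction_graphs:
        name = '{} / {}'.format(loss_graph.__name__, prediction_graph.__name__)
        models_to_compare[name] = TensorRec(n_components=n_components,
                                            biased=biased,
                                            loss_graph=loss_graph(),
                                            prediction_graph=prediction_graph(),
                                            user_repr_graph=NormalizedLinearRepresentationGraph())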
@classmethod
def setUpClass(cls):
    cls.movielens_100k = get_movielens_100k()
import numpy as np

from tensorrec import TensorRec
from tensorrec.eval import precision_at_k, recall_at_k
from tensorrec.input_utils import create_tensorrec_dataset_from_sparse_matrix
from tensorrec.loss_graphs import BalancedWMRBLossGraph
from tensorrec.representation_graphs import ReLURepresentationGraph

from test.datasets import get_movielens_100k

import logging
logging.getLogger().setLevel(logging.INFO)

# Load the movielens dataset
train_interactions, test_interactions, user_features, item_features, item_titles = \
    get_movielens_100k(negative_value=-1.0)

# Assemble parameters for fitting. 'epochs' is 1 in the fit_kwargs because we will be calling fit_partial 1000 times
# to run 1000 epochs.
epochs = 1000
fit_kwargs = {'epochs': 1, 'alpha': 0.0001, 'verbose': True, 'learning_rate': .01,
              'n_sampled_items': int(item_features.shape[0] * .1)}

# Build the TensorRec model
model = TensorRec(n_components=2,
                  biased=False,
                  loss_graph=BalancedWMRBLossGraph(),
                  item_repr_graph=ReLURepresentationGraph(),
                  n_tastes=3)

# Make some random selections of movies and users we want to plot
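# A hedged sketch (not the original script's continuation) of the random selections mentioned in the
# comment above, assuming item_titles is an index-aligned sequence of movie titles.
n_users_to_plot = 5
n_items_to_plot = 20
users_to_plot = np.random.choice(user_features.shape[0], size=n_users_to_plot, replace=False)
items_to_plot = np.random.choice(item_features.shape[0], size=n_items_to_plot, replace=False)
titles_to_plot = [item_titles[i] for i in items_to_plot]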
import keras as ks

from tensorrec import TensorRec
from tensorrec.eval import fit_and_eval
from tensorrec.representation_graphs import AbstractKerasRepresentationGraph
from tensorrec.loss_graphs import SeparationDenseLossGraph

from test.datasets import get_movielens_100k

import logging
logging.getLogger().setLevel(logging.INFO)

train_interactions, test_interactions, user_features, item_features, _ = get_movielens_100k()


class ExampleKerasRepresentationGraph(AbstractKerasRepresentationGraph):
    def create_layers(self, n_features, n_components):
        # Funnel the input features through three dense layers down to n_components dimensions
        return [
            ks.layers.Dense(int(n_features / 2), activation='relu'),
            ks.layers.Dense(n_components * 2, activation='relu'),
            ks.layers.Dense(n_components, activation='tanh'),
        ]


model = TensorRec(n_components=10,
                  item_repr_graph=ExampleKerasRepresentationGraph(),
                  loss_graph=SeparationDenseLossGraph())

fit_kwargs = {'epochs': 1000, 'learning_rate': .001, 'verbose': True}
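# A hedged sketch (not the original script's continuation): fit the Keras-backed model with the
# kwargs above and sanity-check the output. The original imports fit_and_eval, which would also
# report ranking metrics; plain fit/predict is used here to keep the assumed API surface minimal.
model.fit(interactions=train_interactions,
          user_features=user_features,
          item_features=item_features,
          **fit_kwargs)

predictions = model.predict(user_features=user_features, item_features=item_features)
logging.info('Predicted scores for {} users and {} items'.format(predictions.shape[0], predictions.shape[1]))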