Example #1
 def test_FullMTL(self):
     metrics = ['rmse']
     # Build a synthetic 3-task drug-response dataset with a Gaussian response surface.
     dataset = SD.SyntheticDataCreator(num_tasks=3,
                                       cellsPerTask=500,
                                       drugsPerTask=10,
                                       function="gauss",
                                       normalize=False,
                                       noise=1,
                                       graph=False,
                                       test_split=0.3)
     dataset.prepare_data()
     exp_folder = "fullMtlGP_test"
     exp = ModelTraining(exp_folder)
     methods = [
         MtlGP.GPyFullMTL(num_iters=50,
                          length_scale=20,
                          noise_covar=.9,
                          n_inducing_points=500,
                          num_tasks=3)
     ]
     exp.execute(dataset, methods, metrics, nruns=1)
     df = exp.getResultsWrapper()
     rmses = df['Value'].values
     for rmse in rmses:
         assert rmse < 1.5  # arbitrary threshold for reasonable prediction
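Examples #2 through #8 and #10 reference dataset and metrics without defining them locally, so the original test module presumably prepares them once at module scope. Below is a minimal sketch of that shared setup, assuming it mirrors the dataset construction shown in Example #1; the constructor arguments are illustrative, and the import for SD (and the other classes used by the tests) is omitted because its module path is not shown in these excerpts.

 # Hypothetical module-level fixture (assumption, not taken from the source):
 # build one synthetic dataset and one metric list that the remaining tests reuse.
 metrics = ['rmse']
 dataset = SD.SyntheticDataCreator(num_tasks=3,
                                   cellsPerTask=500,
                                   drugsPerTask=10,
                                   function="gauss",
                                   normalize=False,
                                   noise=1,
                                   graph=False,
                                   test_split=0.3)
 dataset.prepare_data()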
Example #2
 def test_NonNegative_MF(self):
     exp_folder = "svdnnmf_test"
     exp = ModelTraining(exp_folder)
     methods = [NonNegative_MF(n_factors=30)]
     exp.execute(dataset, methods, metrics, nruns=1)
     df = exp.getResultsWrapper()
     rmses = df['Value'].values
     for rmse in rmses:
         assert rmse < 1.5  # arbitrary threshold for reasonable prediction
Example #3
 def test_KNNBasic(self):
     exp_folder = "knnbasic_test"
     exp = ModelTraining(exp_folder)
     methods = [KNN_Basic(k=10)]
     exp.execute(dataset, methods, metrics, nruns=1)
     df = exp.getResultsWrapper()
     rmses = df['Value'].values
     for rmse in rmses:
         assert rmse < 1.5  # arbitrary threshold for reasonable prediction
Example #4
 def test_HadamardGP(self):
     exp_folder = "HadamardGP_test"
     exp = ModelTraining(exp_folder)
     methods = [
         MtlGP.HadamardMTL(num_iters=50,
                           length_scale=20,
                           noise_covar=.9,
                           n_inducing_points=500,
                           composite=False,
                           learning_rate=.1,
                           validate=False)
     ]
     exp.execute(dataset, methods, metrics, nruns=1)
     df = exp.getResultsWrapper()
     rmses = df['Value'].values
     for rmse in rmses:
         assert rmse < 1.5  # arbitrary threshold for reasonable prediction
Example #5
 def test_ExactGP(self):
     exp_folder = "exactGP_test"
     exp = ModelTraining(exp_folder)
     methods = [
         ExactGPRegression(num_iters=10, length_scale=50, noise_covar=1.5)
     ]
     exp.execute(dataset, methods, metrics, nruns=1)
     df = exp.getResultsWrapper()
     rmses = df['Value'].values
     for rmse in rmses:
         assert rmse < 1.5  # arbitrary threshold for reasonable prediction
Example #6
 def test_NCF_MTL_MF(self):
     hyperparams_mtlmf = {'batch_size': 64, 'epochs': 150, 'layers': '[64,32,16,8]',
                          'learner': 'adam', 'lr': .001, 'mf_lr': .001, 'num_factors': 10,
                          'reg_layers': '[0,0,0,.01]', 'reg_mf': 0.01, 'verbose': 1}
     methods = [
         NCF_MTL.Neural_Collaborative_Filtering_FeaturesMTLMF(
             hyperparams_mtlmf, 'NCF_MTL_MF', 'feature_based')
     ]
     exp_folder = "ncf_test_MF"
     exp = ModelTraining(exp_folder)
     exp.execute(dataset, methods, metrics, nruns=1)
     df = exp.getResultsWrapper()
     rmses = df['Value'].values
     for rmse in rmses:
         assert rmse < 2  # arbitrary threshold for reasonable prediction
Example #7
 def test_NCF(self):
     hyperparams = {'batch_size': 32, 'epochs': 200, 'layers': '[64,32,16,8]',
                    'learner': 'rmsprop', 'lr': 0.001, 'num_factors': 8, 'num_neg': 4,
                    'reg_layers': '[0,0,0,0]', 'reg_mf': 0.0, 'verbose': 1, 'warm_start': False}
     exp_folder = "ncf_test"
     exp = ModelTraining(exp_folder)
     methods = [
         Neural_Collaborative_Filtering(hyperparams,
                                        'Ratings matrix NCF',
                                        'non_feature_based',
                                        warm_start=False)
     ]
     exp.execute(dataset, methods, metrics, nruns=1)
     df = exp.getResultsWrapper()
     rmses = df['Value'].values
     for rmse in rmses:
         assert rmse < 2  # arbitrary threshold for reasonable prediction
Example #8
 def test_SparseCompGP(self):
     exp_folder = "sparseCompGP_test"
     exp = ModelTraining(exp_folder)
     methods = [
         SparseGPCompositeKernelRegression(num_iters=15,
                                           learning_rate=1e-1,
                                           noise_covar=1.0,
                                           length_scale_cell=30.0,
                                           output_scale_cell=1.0,
                                           length_scale_drug=30.0,
                                           output_scale_drug=1.0)
     ]
     exp.execute(dataset, methods, metrics, nruns=1)
     df = exp.getResultsWrapper()
     rmses = df['Value'].values
     for rmse in rmses:
         assert rmse < 1.5  # arbitrary threshold for reasonable prediction
Example #9
 def test_NN(self):
     # Build a normalized synthetic 3-task dataset with a cosine response surface for the NN test.
     dataset = SD.SyntheticDataCreator(num_tasks=3,
                                       cellsPerTask=400,
                                       drugsPerTask=10,
                                       function="cosine",
                                       normalize=True,
                                       noise=1,
                                       graph=False,
                                       test_split=0.3)
     dataset.prepare_data()
     exp_folder = "NN_test"
     exp = ModelTraining(exp_folder)
     methods = [FeedForwardNN([25, 25], 'relu', epochs=60, lr=1e-3)]
     exp.execute(dataset, methods, metrics, nruns=1)
     df = exp.getResultsWrapper()
     rmses = df['Value'].values
     for rmse in rmses:
         assert rmse < 1.5  # arbitrary threshold for reasonable prediction
Example #10
 def test_NCFFeat(self):
     exp_folder = "ncfFeat_test"
     exp = ModelTraining(exp_folder)
     hyperparams_feats = {'batch_size': 64, 'epochs': 150, 'layers': '[64,32,16,8]',
                          'learner': 'adam', 'lr': 0.001, 'mf_pretrain': '', 'mlp_pretrain': '',
                          'num_factors': 8, 'num_neg': 4, 'out': 1, 'path': 'Data/',
                          'reg_layers': '[0,0,0,0]', 'reg_mf': 0, 'verbose': 1}
     methods = [
         Neural_Collaborative_Filtering_Features(
             hyperparams_feats,
             'Neural Collaborative Filtering',
             'feature_based',
             warm_start=True)
     ]
     exp.execute(dataset, methods, metrics, nruns=1)
     df = exp.getResultsWrapper()
     rmses = df['Value'].values
     for rmse in rmses:
         assert rmse < 2  # arbitrary threshold for reasonable prediction
Example #11
                   'reg_layers': '[0,0,0,.01]', 'reg_mf': 0.01, 'verbose': 1}


    methods = [MtlGP.HadamardMTL(num_iters=150, length_scale=57, noise_covar=.24,
                                 n_inducing_points=1000, composite=False, learning_rate=.07,
                                 validate=False, bias=False, stabilize=True),
               MtlGP.GPyFullMTL(num_iters=72, length_scale=58.828, noise_covar=0.31587,
                                n_inducing_points=500, num_tasks=3, learning_rate=0.02729),
               NCF_MTL.Neural_Collaborative_Filtering_FeaturesMTLMLP(hyperparams_mtlmlp, 'MTL NCF MLP', 'feature_based'),
               NCF_MTL.Neural_Collaborative_Filtering_FeaturesMTLMF(hyperparams_mtlmf, 'NCF_MTL_MF', 'feature_based'),
               SVD_MF(n_factors=10),
               KNN_Normalized(k=10)
              ]

    metrics = ['rmse','mae']

    exp_folder = __file__[:-3]  # name the experiment folder after this script, without the ".py" extension
    exp = ModelTraining(exp_folder)
    exp.execute(dataset, methods, metrics, nruns=1)  # increase nruns for a more accurate error estimate
    exp.generate_report()
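    # Hedged sketch (not part of the original script): besides the generated report,
    # the raw results can be inspected programmatically the same way the unit tests do,
    # reusing only getResultsWrapper() and the 'Value' column shown in the excerpts above;
    # any other column names would be assumptions.
    df = exp.getResultsWrapper()
    values = df['Value'].values
    print('mean metric value across methods/runs: %.4f' % values.mean())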

    
    
"""

Frozen hyperparameters -- sometimes they perform worse.


    hyperparams_mtlmf = {'batch_size': 64, 'epochs': 227, 'layers': '[64,32,16,8]',
                         'learner': 'sgd', 'lr': 1.00293510662245e-05, 'mf_lr': 0.000111324, 'num_factors': 100,
                         'reg_layers': '[0,0,0,.01]', 'reg_mf': 0.009970084324087263, 'verbose': 1}
    hyperparams_mtlmlp = {'batch_size': 64, 'epochs': 238, 'layers': '[64,32,16,8]',
                          'learner': 'sgd', 'lr': 0.00042715, 'mlp_lr': .001, 'num_factors': 84,