def test_run_cross_validation(self):
    """Cross-validate SklearnRandomForestTrainTestModel on self.features and
    pin the resulting correlation/error statistics to known-good values."""
    # Parenthesized print: same output under Python 2, valid under Python 3.
    print("test cross validation...")

    train_test_model_class = SklearnRandomForestTrainTestModel
    # random_state pinned so the random forest — and hence the stats — is deterministic.
    model_param = {'norm_type': 'normalize', 'random_state': 0}

    # Train and test on the same indices: an in-sample sanity check.
    indices_train = range(9)
    indices_test = range(9)

    output = ModelCrossValidation.run_cross_validation(
        train_test_model_class, model_param, self.features,
        indices_train, indices_test)

    # assertAlmostEqual/assertEqual: non-deprecated spellings of the
    # assertAlmostEquals/assertEquals aliases used originally.
    self.assertAlmostEqual(output['stats']['SRCC'], 0.93333333333333324, places=4)
    self.assertAlmostEqual(output['stats']['PCC'], 0.97754442316039469, places=4)
    self.assertAlmostEqual(output['stats']['KENDALL'], 0.83333333333333337, places=4)
    self.assertAlmostEqual(output['stats']['RMSE'], 0.17634739353518517, places=4)
    self.assertEqual(output['model'].TYPE, "RANDOMFOREST")
def test_run_cross_validation(self):
    """Cross-validate RandomForestTrainTestModel on a bundled feature-extraction
    fixture (250 train / 50 test rows) and pin the resulting statistics."""
    # Parenthesized print: same output under Python 2, valid under Python 3.
    print("test cross validation...")

    train_test_model_class = RandomForestTrainTestModel
    # random_state pinned so the random forest — and hence the stats — is deterministic.
    model_param = {'norm_type': 'normalize', 'random_state': 0}

    feature_df_file = config.ROOT + \
        "/python/test/resource/sample_feature_extraction_results.json"
    # NOTE(review): eval() of file contents is acceptable only because this is a
    # bundled test fixture; never eval external data.
    # 'with' guarantees the file handle is closed (the original leaked it).
    with open(feature_df_file, "r") as feature_file:
        feature_df = pd.DataFrame.from_dict(eval(feature_file.read()))

    indices_train = range(250)
    indices_test = range(250, 300)

    output = ModelCrossValidation.run_cross_validation(
        train_test_model_class, model_param, feature_df,
        indices_train, indices_test)

    # assertAlmostEqual/assertEqual: non-deprecated spellings of the
    # assertAlmostEquals/assertEquals aliases used originally.
    self.assertAlmostEqual(output['stats']['SRCC'], 0.93493301443051136, places=4)
    self.assertAlmostEqual(output['stats']['PCC'], 0.9413390374529329, places=4)
    self.assertAlmostEqual(output['stats']['KENDALL'], 0.78029280419726044, places=4)
    self.assertAlmostEqual(output['stats']['RMSE'], 0.32357946626958406, places=4)
    self.assertEqual(output['model'].TYPE, "RANDOMFOREST")
def test_run_cross_validation(self):
    """Cross-validate SklearnRandomForestTrainTestModel on self.features and
    pin the resulting correlation/error statistics to known-good values."""
    # Parenthesized print: same output under Python 2, valid under Python 3.
    print("test cross validation...")

    train_test_model_class = SklearnRandomForestTrainTestModel
    # random_state pinned so the random forest — and hence the stats — is deterministic.
    model_param = {'norm_type': 'normalize', 'random_state': 0}

    # Train and test on the same indices: an in-sample sanity check.
    indices_train = range(9)
    indices_test = range(9)

    output = ModelCrossValidation.run_cross_validation(
        train_test_model_class, model_param, self.features,
        indices_train, indices_test)

    # assertAlmostEqual/assertEqual: non-deprecated spellings of the
    # assertAlmostEquals/assertEquals aliases used originally.
    self.assertAlmostEqual(output['stats']['SRCC'], 0.93333333333333324, places=4)
    self.assertAlmostEqual(output['stats']['PCC'], 0.97754442316039469, places=4)
    self.assertAlmostEqual(output['stats']['KENDALL'], 0.83333333333333337, places=4)
    self.assertAlmostEqual(output['stats']['RMSE'], 0.17634739353518517, places=4)
    self.assertEqual(output['model'].TYPE, "RANDOMFOREST")
def test_run_cross_validation(self):
    """Cross-validate RandomForestTrainTestModel on a bundled feature-extraction
    fixture (250 train / 50 test rows) and pin the resulting statistics."""
    # Parenthesized print: same output under Python 2, valid under Python 3.
    print("test cross validation...")

    train_test_model_class = RandomForestTrainTestModel
    # random_state pinned so the random forest — and hence the stats — is deterministic.
    model_param = {'norm_type': 'normalize', 'random_state': 0}

    feature_df_file = config.ROOT + \
        "/python/test/resource/sample_feature_extraction_results.json"
    # NOTE(review): eval() of file contents is acceptable only because this is a
    # bundled test fixture; never eval external data.
    # 'with' guarantees the file handle is closed (the original leaked it).
    with open(feature_df_file, "r") as feature_file:
        feature_df = pd.DataFrame.from_dict(eval(feature_file.read()))

    indices_train = range(250)
    indices_test = range(250, 300)

    output = ModelCrossValidation.run_cross_validation(
        train_test_model_class, model_param, feature_df,
        indices_train, indices_test)

    # places=4 restored for consistency with the sibling test using the same
    # expected values: the default of 7 places is stricter than intended here.
    # assertAlmostEqual/assertEqual: non-deprecated spellings of the aliases.
    self.assertAlmostEqual(output['stats']['SRCC'], 0.93493301443051136, places=4)
    self.assertAlmostEqual(output['stats']['PCC'], 0.9413390374529329, places=4)
    self.assertAlmostEqual(output['stats']['KENDALL'], 0.78029280419726044, places=4)
    self.assertAlmostEqual(output['stats']['RMSE'], 0.32357946626958406, places=4)
    self.assertEqual(output['model'].TYPE, "RANDOMFOREST")