def test_submission(path_kit):
    """Train and test every submission shipped with a kit (with retraining).

    For the kits supporting it, the partial-training workflow is exercised
    as well.
    """
    submissions_dir = os.path.join(path_kit, 'submissions')
    for sub in sorted(glob.glob(os.path.join(submissions_dir, '*'))):
        # FIXME: to be removed once el-nino tests is fixed.
        # NOTE(review): pytest.xfail raises, so submissions sorted after
        # 'el_nino' are never reached once it is hit — confirm intended.
        if 'el_nino' in sub:
            pytest.xfail('el-nino is failing due to xarray.')
        submission_name = os.path.basename(sub)
        assert_submission(
            ramp_kit_dir=path_kit,
            ramp_data_dir=path_kit,
            ramp_submission_dir=submissions_dir,
            submission=submission_name,
            is_pickle=True,
            save_output=False,
            retrain=True,
        )
        # testing the partial training workflow
        if 'titanic_old' in sub or 'air_passengers_old' in sub:
            assert_submission(
                ramp_kit_dir=path_kit,
                ramp_data_dir=path_kit,
                ramp_submission_dir=submissions_dir,
                submission=submission_name,
                is_pickle=True,
                is_partial_train=True,
                save_output=False,
                retrain=True,
            )
def test_external_imports(tmpdir):
    """Check that modules under an ``external_imports`` folder located in the
    ramp kit directory can be imported from ``problem.py`` and from a
    submission's ``estimator.py``.
    """
    # work on a temporary copy of the titanic kit
    kit_dir = tmpdir.join("titanic_external_imports")
    shutil.copytree(os.path.join(PATH, "kits", "titanic"), kit_dir)
    problem_path = os.path.join(kit_dir, "problem.py")
    submissions_dir = os.path.join(kit_dir, 'submissions')
    submission_path = os.path.join(submissions_dir, 'starting_kit')
    estimator_path = os.path.join(submission_path, "estimator.py")

    # create the module to be imported
    ext_module_dir = kit_dir.mkdir("external_imports").mkdir("utils")
    with open(os.path.join(ext_module_dir, "test_imports.py"), 'w+') as fh:
        fh.write(dedent("""
            x = 2
            """))

    # make both the problem and the submission import the external module
    for target in (problem_path, estimator_path):
        with open(target, 'a') as fh:
            fh.write(dedent("""
                from utils import test_imports
                assert test_imports.x == 2
                """))

    assert_submission(
        ramp_kit_dir=kit_dir,
        ramp_data_dir=kit_dir,
        ramp_submission_dir=submissions_dir,
        submission=submission_path,
        is_pickle=True,
        save_output=False,
        retrain=True,
    )
def test_blending():
    """Train two iris submissions, blend them, and clean up the outputs."""
    kit_dir = os.path.join(PATH, "kits", "iris")
    submissions_dir = os.path.join(kit_dir, "submissions")
    # both submissions must be trained (with saved output) before blending
    for submission in ('starting_kit', 'random_forest_10_10'):
        assert_submission(
            ramp_kit_dir=kit_dir,
            ramp_data_dir=kit_dir,
            ramp_submission_dir=submissions_dir,
            submission=submission,
            is_pickle=True,
            save_output=True,
            retrain=True,
        )
    blend_submissions(
        ['starting_kit', 'random_forest_10_10'],
        ramp_kit_dir=kit_dir,
        ramp_data_dir=kit_dir,
        ramp_submission_dir=submissions_dir,
        save_output=True,
    )
    # cleaning up so next test doesn't try to train "training_output"
    shutil.rmtree(os.path.join(submissions_dir, "training_output"))
def test_submission(path_kit):
    """Run every submission shipped with a kit, without retraining."""
    pattern = os.path.join(path_kit, 'submissions', '*')
    for submission_dir in sorted(glob.glob(pattern)):
        assert_submission(
            ramp_kit_dir=path_kit,
            ramp_data_dir=path_kit,
            submission=os.path.basename(submission_dir),
            is_pickle=True,
            save_y_preds=False,
            retrain=False,
        )
def test_submission(path_kit):
    """Train and test every submission shipped with a kit (with retraining)."""
    pattern = os.path.join(path_kit, 'submissions', '*')
    for submission_dir in sorted(glob.glob(pattern)):
        # FIXME: to be removed once el-nino tests is fixed.
        # NOTE(review): pytest.xfail raises, so submissions sorted after
        # 'el_nino' are never reached once it is hit — confirm intended.
        if 'el_nino' in submission_dir:
            pytest.xfail('el-nino is failing due to xarray.')
        assert_submission(
            ramp_kit_dir=path_kit,
            ramp_data_dir=path_kit,
            submission=os.path.basename(submission_dir),
            is_pickle=True,
            save_y_preds=False,
            retrain=True,
        )
def test_blending():
    """Train two iris submissions with saved predictions, then blend them."""
    kit_dir = os.path.join(PATH, "kits", "iris")
    # both submissions must be trained (with saved predictions) before blending
    for submission in ('starting_kit', 'random_forest_10_10'):
        assert_submission(
            ramp_kit_dir=kit_dir,
            ramp_data_dir=kit_dir,
            submission=submission,
            is_pickle=True,
            save_y_preds=True,
            retrain=True,
        )
    blend_submissions(
        ['starting_kit', 'random_forest_10_10'],
        ramp_kit_dir=kit_dir,
        ramp_data_dir=kit_dir,
    )