Example #1
import os

import numpy as np

# Project-specific helpers used below (binarize, DAMatrix,
# get_the_meta_learners, run_expe) are assumed to be importable
# from the surrounding package.


def run_on_real_dataset(dataset_dir):
    """`dataset_dir` should contain a file `*.data` in NumPy format."""
    if os.path.isdir(dataset_dir):
        data_files = [
            x for x in os.listdir(dataset_dir) if x.endswith('.data')
        ]
        if len(data_files) != 1:
            raise ValueError("The dataset directory {} ".format(dataset_dir) +
                             "should contain one `.data` file but got " +
                             "{}.".format(data_files))
        data_file = data_files[0]
        data_path = os.path.join(dataset_dir, data_file)
        name_expe = data_file.split('.')[0]

        # Load real dataset and binarize
        perfs = binarize(np.loadtxt(data_path))
        # da_matrix = DAMatrix(perfs=perf, name=name_expe)
        # Reuse the DAMatrix saved in the dataset directory, but replace its
        # performance matrix with the binarized values loaded above.
        da_matrix = DAMatrix.load(dataset_dir)
        da_matrix.perfs = perfs
        meta_learners = get_the_meta_learners(
            exclude_optimal=True)[1:]  # Exclude random

        run_expe(da_matrix,
                 meta_learners=meta_learners,
                 name_expe=name_expe,
                 with_once_random=True,
                 show_legend=True)
    else:
        raise ValueError("Not a directory: {}".format(dataset_dir))
Example #2
def test_nfldamatrix():
    # Round-trip check: save an NFLDAMatrix to disk, reload it via
    # DAMatrix.load, and compare the performance matrices by eye.
    da_matrix = NFLDAMatrix()
    path_to_dir = da_matrix.save()
    da_matrix2 = DAMatrix.load(path_to_dir)
    print(da_matrix.perfs)
    print(da_matrix2.perfs)
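
To turn the visual comparison into an automated check, the round trip can be asserted directly; a short sketch assuming `perfs` can be converted to a NumPy array:

import numpy as np

def test_nfldamatrix_roundtrip():
    # Sketch: save() followed by DAMatrix.load() should preserve `perfs`.
    da_matrix = NFLDAMatrix()
    da_matrix2 = DAMatrix.load(da_matrix.save())
    assert np.array_equal(np.asarray(da_matrix.perfs),
                          np.asarray(da_matrix2.perfs))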