Example #1
import pytest

# assumed import path for braindecode's concatenated dataset class
from braindecode.datasets import BaseConcatDataset


def test_save_varying_number_of_datasets_with_overwrite(
        setup_concat_windows_dataset, tmpdir):
    concat_windows_dataset = setup_concat_windows_dataset
    concat_windows_dataset.save(path=tmpdir, overwrite=False)
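
    # assert a warning is raised when fewer datasets are saved than exist on disk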
    subset = concat_windows_dataset.split([0])['0']
    with pytest.warns(UserWarning, match='The number of saved datasets'):
        subset.save(path=tmpdir, overwrite=True)

    # assert no warning raised when there are as many subdirectories as before
    with pytest.warns(None) as raised_warnings:
        concat_windows_dataset.save(path=tmpdir, overwrite=True)
        assert len(raised_warnings) == 0

    # assert no warning raised when there are more subdirectories than before
    double_concat_windows_dataset = BaseConcatDataset(
        [concat_windows_dataset, concat_windows_dataset])
    with pytest.warns(None) as raised_warnings:
        double_concat_windows_dataset.save(path=tmpdir, overwrite=True)
        assert len(raised_warnings) == 0
Example #2
import os

import pytest

# assumed import paths for braindecode's dataset, preprocessing and I/O helpers
from braindecode.datasets import BaseConcatDataset
from braindecode.datautil import load_concat_dataset
from braindecode.preprocessing import Preprocessor, preprocess


# `overwrite` is supplied via pytest parametrization
@pytest.mark.parametrize('overwrite', [True, False])
def test_preprocess_overwrite(base_concat_ds, tmp_path, overwrite):
    preprocessors = [Preprocessor('crop', tmax=10, include_tmax=False)]

    # Create temporary directory with preexisting files
    save_dir = str(tmp_path)
    for i, ds in enumerate(base_concat_ds.datasets):
        concat_ds = BaseConcatDataset([ds])
        save_subdir = os.path.join(save_dir, str(i))
        os.makedirs(save_subdir)
        concat_ds.save(save_subdir, overwrite=True)

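    # overwrite=True should replace the preexisting serialized files;
    # overwrite=False should refuse and raise FileExistsError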
    if overwrite:
        preprocess(base_concat_ds, preprocessors, save_dir, overwrite=True)
        # Make sure the serialized data is preprocessed
        preproc_concat_ds = load_concat_dataset(save_dir, preload=True)
        assert all([len(ds.raw.times) == 2500
                    for ds in preproc_concat_ds.datasets])
    else:
        with pytest.raises(FileExistsError):
            preprocess(base_concat_ds, preprocessors, save_dir,
                       overwrite=False)