コード例 #1
0
def test_ica_aroma(tmp_path):
    """Test ICA AROMA related file input.

    Covers the aggressive ("basic") strategy, the non-aggressive ("full")
    strategy, and rejection of an invalid ``ica_aroma`` option.
    """
    aroma_nii, _ = create_tmp_filepath(
        tmp_path, image_type="ica_aroma", copy_confounds=True
    )
    regular_nii, _ = create_tmp_filepath(
        tmp_path, image_type="regular", copy_confounds=True
    )
    # Aggressive strategy
    conf, _ = load_confounds(
        regular_nii, strategy=("ica_aroma", ), ica_aroma="basic"
    )
    for col_name in conf.columns:
        # only aroma and non-steady state columns will be present.
        # Raw string with explicit prefixes replaces the original
        # "aroma_motion_+|non_steady_state+" pattern, whose trailing `+`
        # quantifiers only worked by accident; the accepted column set
        # is unchanged (re.match anchors at the start of the name).
        assert re.match(r"(?:aroma_motion_|non_steady_state)", col_name)

    # Non-aggressive strategy: denoising was already applied to the
    # image, so no confound regressors are expected.
    conf, _ = load_confounds(
        aroma_nii, strategy=("ica_aroma", ), ica_aroma="full"
    )
    assert conf.size == 0

    # invalid combination of strategy and option
    with pytest.raises(ValueError) as exc_info:
        conf, _ = load_confounds(
            regular_nii, strategy=("ica_aroma", ), ica_aroma="invalid"
        )
    assert "Current input: invalid" in exc_info.value.args[0]
コード例 #2
0
def test_sample_mask(tmp_path):
    """Test load method and sample mask.

    Checks that scrubbing-based sample masks exclude motion outliers,
    non-steady-state volumes, and short segments, while the confound
    regressors themselves keep the full (unmasked) number of rows.
    """
    regular_nii, regular_conf = create_tmp_filepath(
        tmp_path, image_type="regular", copy_confounds=True
    )

    reg, mask = load_confounds(
        regular_nii, strategy=("motion", "scrub"), scrub=5, fd_threshold=0.15
    )
    # the current test data has 6 time points marked as motion outliers,
    # and one nonsteady state (overlap with the first motion outlier);
    # 2 time points removed due to the "full" scrubbing strategy (remove
    # segments shorter than 5 volumes) -> 8 excluded volumes total
    assert reg.shape[0] - len(mask) == 8
    # nilearn requires unmasked confound regressors
    assert reg.shape[0] == 30

    # non steady state will always be removed, even without scrubbing
    reg, mask = load_confounds(regular_nii, strategy=("motion", ))
    assert reg.shape[0] - len(mask) == 1

    # When no non-steady state volumes are present
    conf_data, _ = get_leagal_confound(non_steady_state=False)
    conf_data.to_csv(regular_conf, sep="\t", index=False)  # save to tmp
    reg, mask = load_confounds(regular_nii, strategy=("motion", ))
    # nothing to exclude -> no sample mask is returned
    assert mask is None

    # When no volumes need removing (very liberal motion threshold)
    reg, mask = load_confounds(
        regular_nii, strategy=("motion", "scrub"), scrub=0, fd_threshold=4
    )
    assert mask is None
コード例 #3
0
def test_nilearn_regress(tmp_path, test_strategy, param):
    """Try regressing out all motion types without sample mask."""
    # build a temporary image/confound pair, load the requested
    # strategy, and make sure regression runs end to end
    nii_file, _ = create_tmp_filepath(
        tmp_path, copy_confounds=True, copy_json=True
    )
    conf, _ = load_confounds(nii_file, strategy=test_strategy, **param)
    _regression(conf, tmp_path)
コード例 #4
0
def test_strategy_scrubbing(tmp_path):
    """Check user specified input for scrubbing strategy.

    Verifies the sample mask size for strict and liberal thresholds, and
    that adding ``global_signal`` loads the full set of GS regressors.
    """
    file_nii, _ = create_tmp_filepath(tmp_path,
                                      image_type="regular",
                                      copy_confounds=True,
                                      copy_json=True)
    confounds, sample_mask = load_confounds_strategy(
        file_nii, denoise_strategy="scrubbing", fd_threshold=0.15)
    # out of 30 vols, should have 6 motion outliers from scrubbing,
    # and 2 vols removed by scrubbing strategy "full"
    assert len(sample_mask) == 22
    # shape of confound regressors untouched
    assert confounds.shape[0] == 30
    # also load confounds with very liberal scrubbing thresholds
    # this should not produce an error
    confounds, sample_mask = load_confounds_strategy(
        file_nii,
        denoise_strategy="scrubbing",
        fd_threshold=1,
        std_dvars_threshold=5)
    assert len(sample_mask) == 29  # only non-steady volumes removed

    # make sure global signal works
    confounds, sample_mask = load_confounds_strategy(
        file_nii, denoise_strategy="scrubbing", global_signal="full")
    for check in [
            "global_signal",
            "global_signal_derivative1",
            "global_signal_power2",
            "global_signal_derivative1_power2",
    ]:
        assert check in confounds.columns
コード例 #5
0
def test_non_steady_state(tmp_path):
    """Warn when 'non_steady_state' is in strategy."""
    # non-steady-state volumes are handled automatically, so listing
    # 'non_steady_state' explicitly should raise a UserWarning
    img, conf = create_tmp_filepath(tmp_path, copy_confounds=True)
    with pytest.warns(UserWarning, match=r"Non-steady state"):
        load_confounds(img, strategy=('non_steady_state', 'motion'))
コード例 #6
0
def test_motion(tmp_path, motion, param, expected_suffixes):
    """Check that only the expected motion-derived columns are loaded."""
    nii_file, _ = create_tmp_filepath(tmp_path, copy_confounds=True)
    conf, _ = load_confounds(nii_file, strategy=("motion", ), motion=motion)
    for suffix in SUFFIXES:
        # a column must be present exactly when its suffix belongs to
        # the expected set for this motion model
        is_loaded = f"{param}{suffix}" in conf.columns
        assert is_loaded == (suffix in expected_suffixes)
コード例 #7
0
def test_n_compcor(tmp_path, compcor, n_compcor, test_keyword, test_n):
    """Check the number of compcor components kept for each setting."""
    nii_file, _ = create_tmp_filepath(
        tmp_path, copy_confounds=True, copy_json=True
    )
    conf, _ = load_confounds(
        nii_file, strategy=("high_pass", "compcor", ), compcor=compcor,
        n_compcor=n_compcor
    )
    # count columns belonging to the requested compcor variant
    matching_cols = [col for col in conf.columns if test_keyword in col]
    assert len(matching_cols) == test_n
コード例 #8
0
def test_load_confounds_strategy(tmp_path, denoise_strategy, image_type):
    """Smoke test with no extra inputs."""
    nii_file, _ = create_tmp_filepath(
        tmp_path,
        image_type=image_type,
        copy_confounds=True,
        copy_json=True,
    )
    conf, _ = load_confounds_strategy(
        nii_file, denoise_strategy=denoise_strategy
    )
    # each preset strategy must return a DataFrame of regressors
    assert isinstance(conf, pd.DataFrame)
コード例 #9
0
def test_load_non_nifti(tmp_path):
    """Test non-nifti and invalid file type as input."""
    # tsv file - unsupported input
    _, tsv = create_tmp_filepath(tmp_path, copy_confounds=True, copy_json=True)

    with pytest.raises(ValueError):
        load_confounds(str(tsv))

    # cifti and gifti files should both be supported
    for image_type in ("cifti", "gifti"):
        img, _ = create_tmp_filepath(
            tmp_path,
            image_type=image_type,
            copy_confounds=True,
            copy_json=True,
        )
        conf, _ = load_confounds(img)
        assert conf.size != 0
コード例 #10
0
def test_strategy_compcor(tmp_path):
    """Check user specified input for compcor strategy."""
    nii_file, _ = create_tmp_filepath(
        tmp_path,
        image_type="regular",
        copy_confounds=True,
        copy_json=True,
    )
    conf, _ = load_confounds_strategy(nii_file, denoise_strategy="compcor")
    # concatenate all column names so substring checks cover every column
    joined_columns = "".join(conf.columns)
    # temporal compcor components must not be selected
    assert "t_comp_cor_" not in joined_columns
    # this one comes from the white matter mask
    assert "a_comp_cor_57" not in joined_columns
コード例 #11
0
def test_irrelevant_input(tmp_path):
    """Check invalid input raising correct warning or error message."""
    nii_file, _ = create_tmp_filepath(
        tmp_path,
        image_type="regular",
        copy_confounds=True,
        copy_json=True,
    )
    # an option irrelevant to the chosen strategy warns instead of failing
    expected_warning = (r"parameters accepted: \['motion', 'wm_csf', "
                        "'global_signal', 'demean']")
    with pytest.warns(UserWarning, match=expected_warning):
        load_confounds_strategy(
            nii_file, denoise_strategy="simple", ica_aroma="full"
        )
    # invalid strategy
    with pytest.raises(KeyError, match="blah"):
        load_confounds_strategy(nii_file, denoise_strategy="blah")
コード例 #12
0
def test_strategies(tmp_path, denoise_strategy, image_type):
    """Check defaults setting of each preset strategy."""
    nii_file, _ = create_tmp_filepath(
        tmp_path,
        image_type=image_type,
        copy_confounds=True,
        copy_json=True,
    )
    conf, _ = load_confounds_strategy(
        nii_file, denoise_strategy=denoise_strategy
    )
    # Check that all fixed name model categories have been successfully loaded
    patterns = _get_headers(denoise_strategy)
    for col in conf.columns:
        # each loaded column must match exactly one expected pattern
        n_hits = sum(
            re.match(keyword, col) is not None for keyword in patterns
        )
        assert n_hits == 1
コード例 #13
0
def test_inputs(tmp_path, image_type):
    """Test multiple images as input."""
    # generate two image files to exercise the multi-image path
    files = [
        create_tmp_filepath(
            tmp_path,
            suffix=f"img{index + 1}",
            image_type=image_type,
            copy_confounds=True,
            copy_json=True,
        )[0]
        for index in range(2)  # gifti edge case
    ]

    if image_type == "ica_aroma":
        conf, _ = load_confounds(files, strategy=("ica_aroma", ))
    else:
        conf, _ = load_confounds(files)
    # one confound DataFrame per input image
    assert len(conf) == 2
コード例 #14
0
def test_invalid_filetype(tmp_path):
    """Invalid file types/associated files for load method.

    Exercises the error paths for ambiguous, headerless, outdated
    (camel-case), and missing confound files of a functional image.
    """
    bad_nii, bad_conf = create_tmp_filepath(tmp_path, copy_confounds=True,
                                            old_derivative_suffix=False)
    # sanity check: the freshly created pair loads without error
    conf, _ = load_confounds(bad_nii)

    # more than one legal filename for confounds
    add_conf = "test_desc-confounds_regressors.tsv"
    leagal_confounds, _ = get_leagal_confound()
    leagal_confounds.to_csv(tmp_path / add_conf, sep="\t", index=False)
    with pytest.raises(ValueError) as info:
        load_confounds(bad_nii)
    assert "more than one" in str(info.value)
    (tmp_path / add_conf).unlink()  # Remove for the rest of the tests to run

    # invalid fmriprep version: confound file with no header (<1.0)
    fake_confounds = np.random.rand(30, 20)
    np.savetxt(bad_conf, fake_confounds, delimiter="\t")
    with pytest.raises(ValueError) as error_log:
        load_confounds(bad_nii)
    assert "The confound file contains no header." in str(error_log.value)

    # invalid fmriprep version: old camel case header (<1.2)
    leagal_confounds, _ = get_leagal_confound()
    camel_confounds = leagal_confounds.copy()
    camel_confounds.columns = [
        _to_camel_case(col_name) for col_name in leagal_confounds.columns
    ]
    camel_confounds.to_csv(bad_conf, sep="\t", index=False)
    with pytest.raises(ValueError) as error_log:
        load_confounds(bad_nii)
    assert "contains header in camel case." in str(error_log.value)

    # create an empty nifti file with no associated confound file
    # We only need the path to check this
    no_conf = "no_confound_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz"
    no_confound = tmp_path / no_conf
    no_confound.touch()
    with pytest.raises(ValueError):
        # BUGFIX: the original called load_confounds(bad_nii) here, which
        # raised only because of the camel-case confound left above and
        # never exercised the missing-confound path for the file created
        # just above
        load_confounds(str(no_confound))
コード例 #15
0
def test_get_file_name(tmp_path, flag, suffix):
    """Check confound file name resolution for old/new derivative suffixes."""
    img, _ = create_tmp_filepath(tmp_path, old_derivative_suffix=flag)
    confound_file = _get_file_name(img)
    # resolved confound file name carries the expected suffix
    assert suffix in confound_file
コード例 #16
0
def _simu_img(tmp_path, demean):
    """Simulate a nifti image based on a confound file.

    Half of the slices carry a random mixture of the motion confounds and
    the other half carry pure random noise. Returns the image, the two
    ground-truth masks (confound voxels / noise voxels), the confounds to
    regress, and a sample mask dropping the first volume.
    """
    file_nii, _ = create_tmp_filepath(tmp_path, copy_confounds=True)
    # set the size of the image matrix
    nx = 5
    ny = 5
    # the actual number of slices will actually be double of that
    # as we will stack slices with confounds on top of slices with noise
    nz = 2
    # Load a simple 6 parameters motion models as confounds
    # demean set to False just for simulating signal based on the original
    # state
    confounds, _ = load_confounds(
        file_nii, strategy=("motion", ), motion="basic", demean=False
    )

    X = _handle_non_steady(confounds)
    X = X.values
    # the number of time points is based on the example confound file
    nt = X.shape[0]
    # initialize an empty 4D volume
    vol = np.zeros([nx, ny, 2 * nz, nt])
    vol_conf = np.zeros([nx, ny, 2 * nz])
    vol_rand = np.zeros([nx, ny, 2 * nz])

    # create random noise and a random mixture of confounds standardized
    # to zero mean and unit variance
    if sys.version_info < (3, 7):  # fall back to random state for 3.6
        # NOTE(review): RandomState(42) is created but not assigned, so
        # the np.random calls below use the global state — presumably
        # intentional legacy behavior; confirm before changing
        np.random.RandomState(42)
        beta = np.random.rand(nx * ny * nz, X.shape[1])
        tseries_rand = scale(np.random.rand(nx * ny * nz, nt), axis=1)
    else:
        randome_state = np.random.default_rng(0)
        beta = randome_state.random((nx * ny * nz, X.shape[1]))
        tseries_rand = scale(randome_state.random((nx * ny * nz, nt)), axis=1)
    # create the confound mixture
    tseries_conf = scale(np.matmul(beta, X.transpose()), axis=1)

    # fill the first half of the 4D data with the random mixture
    vol[:, :, 0:nz, :] = tseries_conf.reshape(nx, ny, nz, nt)
    vol_conf[:, :, 0:nz] = 1

    # create random noise in the second half of the 4D data
    vol[:, :, range(nz, 2 * nz), :] = tseries_rand.reshape(nx, ny, nz, nt)
    vol_rand[:, :, range(nz, 2 * nz)] = 1

    # Shift the mean to non-zero
    vol = vol + 10

    # create an nifti image with the data, and corresponding mask
    img = Nifti1Image(vol, np.eye(4))
    mask_conf = Nifti1Image(vol_conf, np.eye(4))
    mask_rand = Nifti1Image(vol_rand, np.eye(4))

    # generate the associated confounds for testing
    test_confounds, _ = load_confounds(
        file_nii, strategy=("motion",), motion="basic", demean=demean)
    # match how we extend the length to increase the degree of freedom
    test_confounds = _handle_non_steady(test_confounds)
    # sample mask drops the first volume
    sample_mask = np.arange(test_confounds.shape[0])[1:]
    return img, mask_conf, mask_rand, test_confounds, sample_mask
コード例 #17
0
def test_confounds2df(tmp_path):
    """Check auto-detect of confounds from an fMRI nii image."""
    nii_file, _ = create_tmp_filepath(tmp_path, copy_confounds=True)
    conf, _ = load_confounds(nii_file)
    # motion parameters are always present in a valid confound file
    assert "trans_x" in conf.columns
コード例 #18
0
def test_not_found_exception(tmp_path):
    """Check various file or parameter missing scenario.

    Removes selected columns from the confound file, then checks that
    each strategy reports exactly the missing parameters/keywords, and
    that ICA-AROMA strategies validate their required input files.
    """
    # Create invalid confound file in temporary dir
    img_missing_confounds, bad_conf = create_tmp_filepath(
        tmp_path, copy_confounds=True, copy_json=False
    )
    missing_params = ["trans_y", "trans_x_derivative1", "rot_z_power2"]
    missing_keywords = ["cosine"]

    leagal_confounds = pd.read_csv(bad_conf, delimiter="\t", encoding="utf-8")
    # collect all cosine (high-pass) and aroma columns so they can be
    # dropped wholesale
    cosine = [
        col_name
        for col_name in leagal_confounds.columns
        if "cosine" in col_name
    ]
    aroma = [
        col_name
        for col_name in leagal_confounds.columns
        if "aroma" in col_name
    ]
    missing_confounds = leagal_confounds.drop(
        columns=missing_params + cosine + aroma
    )
    # overwrite the confound file with the truncated version
    missing_confounds.to_csv(bad_conf, sep="\t", index=False)

    # the error message must name the exact missing parameters/keywords
    with pytest.raises(ValueError) as exc_info:
        load_confounds(
            img_missing_confounds,
            strategy=("high_pass", "motion", "global_signal", ),
            global_signal="full",
            motion="full",
        )
    assert f"{missing_params}" in exc_info.value.args[0]
    assert f"{missing_keywords}" in exc_info.value.args[0]

    # loading anat compcor should also raise an error, because the json file is
    # missing for that example dataset
    with pytest.raises(ValueError):
        load_confounds(
            img_missing_confounds,
            strategy=("high_pass", "compcor"),
            compcor="anat_combined",
        )

    # catch invalid compcor option
    with pytest.raises(KeyError):
        load_confounds(
            img_missing_confounds, strategy=("high_pass", "compcor"),
            compcor="blah"
        )

    # Aggressive ICA-AROMA strategy requires
    # default nifti and noise ICs in confound file
    # correct nifti but missing noise regressor
    with pytest.raises(ValueError) as exc_info:
        load_confounds(
            img_missing_confounds, strategy=("ica_aroma", ), ica_aroma="basic"
        )
    assert "aroma" in exc_info.value.args[0]

    # Aggressive ICA-AROMA strategy requires
    # default nifti
    aroma_nii, _ = create_tmp_filepath(
        tmp_path, image_type="ica_aroma", suffix="aroma"
    )
    with pytest.raises(ValueError) as exc_info:
        load_confounds(
            aroma_nii, strategy=("ica_aroma", ), ica_aroma="basic"
        )
    assert "Invalid file type" in exc_info.value.args[0]

    # non aggressive ICA-AROMA strategy requires
    # desc-smoothAROMAnonaggr nifti file
    with pytest.raises(ValueError) as exc_info:
        load_confounds(
            img_missing_confounds, strategy=("ica_aroma", ), ica_aroma="full"
        )
    assert "desc-smoothAROMAnonaggr_bold" in exc_info.value.args[0]

    # no confound files along the image file
    (tmp_path / bad_conf).unlink()
    with pytest.raises(ValueError) as exc_info:
        load_confounds(img_missing_confounds)
    assert "Could not find associated confound file." in exc_info.value.args[0]