def test_concat_niimgs():
    """Check concat_niimgs on 3D inputs, affine mismatches and file paths."""
    vol_shape = (10, 11, 12)
    identity = np.eye(4)
    ones_img = Nifti1Image(np.ones(vol_shape), identity)
    scaled_affine_img = Nifti1Image(np.ones(vol_shape), 2 * identity)
    zeros_img = Nifti1Image(np.zeros(vol_shape), identity)
    fourd_img = Nifti1Image(np.ones(vol_shape + (2, )), identity)

    # Three 3D volumes stack along a new fourth axis; the middle one is
    # all zeros so the result is distinguishable slice by slice.
    joined = _utils.concat_niimgs((ones_img, zeros_img, ones_img))
    expected = np.ones(vol_shape + (3,))
    expected[..., 1] = 0
    np.testing.assert_almost_equal(joined.get_data(), expected)

    # Mismatched affines must be rejected.
    assert_raises(ValueError, _utils.concat_niimgs,
                  [ones_img, scaled_affine_img])

    # Smoke-test the accept_4d
    assert_raises(ValueError, _utils.concat_niimgs, [ones_img, fourd_img])
    joined = _utils.concat_niimgs([ones_img, fourd_img], accept_4d=True)
    assert_equal(joined.shape[3], 3)

    # Filename inputs with mismatched affines must also raise.
    _, tmpimg1 = tempfile.mkstemp(suffix='.nii')
    _, tmpimg2 = tempfile.mkstemp(suffix='.nii')
    try:
        nibabel.save(ones_img, tmpimg1)
        nibabel.save(scaled_affine_img, tmpimg2)
        nose.tools.assert_raises(ValueError, _utils.concat_niimgs,
                                 [tmpimg1, tmpimg2])
    finally:
        _remove_if_exists(tmpimg1)
        _remove_if_exists(tmpimg2)
def test_iterator_generator():
    """concat_niimgs accepts lists, iterators and generators alike."""
    # Build ten random 3D images with a fixed seed for reproducibility.
    random_state = np.random.RandomState(42)
    imgs = []
    for _ in range(10):
        imgs.append(
            Nifti1Image(random_state.random_sample((10, 10, 10)), np.eye(4)))

    joined = _utils.concat_niimgs(imgs)
    assert joined.shape[-1] == 10
    assert_array_almost_equal(get_data(joined)[..., 0], get_data(imgs[0]))

    # Same with iteration
    joined = _utils.concat_niimgs(image.iter_img(imgs))
    assert joined.shape[-1] == 10
    assert_array_almost_equal(get_data(joined)[..., 0], get_data(imgs[0]))

    # Now, a generator — the sink list records every image it yields.
    sink = []
    joined = _utils.concat_niimgs(nifti_generator(sink))
    assert joined.shape[-1] == 10
    assert len(sink) == 10
def test_concat_niimgs():
    """Exercise concat_niimgs: stacking, affine checks, accept_4d, files."""
    shape = (10, 11, 12)
    eye = np.eye(4)
    niimg_a = Nifti1Image(np.ones(shape), eye)
    niimg_bad_affine = Nifti1Image(np.ones(shape), 2 * eye)
    niimg_b = Nifti1Image(np.zeros(shape), eye)
    niimg_4d = Nifti1Image(np.ones(shape + (2, )), eye)

    # Basic stacking: ones / zeros / ones along a new fourth axis.
    result = _utils.concat_niimgs((niimg_a, niimg_b, niimg_a))
    reference = np.ones(shape + (3, ))
    reference[..., 1] = 0
    np.testing.assert_almost_equal(result.get_data(), reference)

    # Differing affines are an error.
    assert_raises(ValueError, _utils.concat_niimgs,
                  [niimg_a, niimg_bad_affine])

    # Smoke-test the accept_4d
    assert_raises(ValueError, _utils.concat_niimgs, [niimg_a, niimg_4d])
    result = _utils.concat_niimgs([niimg_a, niimg_4d], accept_4d=True)
    assert_equal(result.shape[3], 3)

    # The same affine check must trigger for on-disk inputs.
    _, tmp_a = tempfile.mkstemp(suffix='.nii')
    _, tmp_b = tempfile.mkstemp(suffix='.nii')
    try:
        nibabel.save(niimg_a, tmp_a)
        nibabel.save(niimg_bad_affine, tmp_b)
        nose.tools.assert_raises(ValueError, _utils.concat_niimgs,
                                 [tmp_a, tmp_b])
    finally:
        _remove_if_exists(tmp_a)
        _remove_if_exists(tmp_b)
def test_concat_niimgs():
    """Dimensionality checks, auto-resampling and 5D rejection."""
    # create images different in affine and 3D/4D shape
    shape = (10, 11, 12)
    affine = np.eye(4)
    img1 = Nifti1Image(np.ones(shape), affine)
    img2 = Nifti1Image(np.ones(shape), 2 * affine)
    img3 = Nifti1Image(np.zeros(shape), affine)
    img4d = Nifti1Image(np.ones(shape + (2, )), affine)

    shape2 = (12, 11, 10)
    img1b = Nifti1Image(np.ones(shape2), affine)

    shape3 = (11, 22, 33)
    img1c = Nifti1Image(np.ones(shape3), affine)

    # Regression test for #601. Dimensionality of first image was not checked
    # properly.
    # FIX: raw strings — '\(' in an ordinary string literal is an invalid
    # escape sequence (SyntaxWarning on recent Python); the pattern is a
    # regex, so the raw form is both correct and warning-free.
    _dimension_error_msg = (r"Input data has incompatible dimensionality: "
                            r"Expected dimension is 4D and you provided "
                            r"a list of 4D images \(5D\)")
    assert_raises_regex(DimensionError, _dimension_error_msg,
                        _utils.concat_niimgs, [img4d], ensure_ndim=4)

    # check basic concatenation with equal shape/affine
    concatenated = _utils.concat_niimgs((img1, img3, img1))

    assert_raises_regex(DimensionError, _dimension_error_msg,
                        _utils.concat_niimgs, [img1, img4d])

    # smoke-test auto_resample
    concatenated = _utils.concat_niimgs((img1, img1b, img1c),
                                        auto_resample=True)
    assert_true(concatenated.shape == img1.shape + (3, ))

    # check error for non-forced but necessary resampling
    assert_raises_regex(ValueError, 'Field of view of image',
                        _utils.concat_niimgs, [img1, img2],
                        auto_resample=False)

    # test list of 4D niimgs as input
    tmpimg1 = tempfile.mktemp(suffix='.nii')
    tmpimg2 = tempfile.mktemp(suffix='.nii')
    try:
        nibabel.save(img1, tmpimg1)
        nibabel.save(img3, tmpimg2)
        concatenated = _utils.concat_niimgs([tmpimg1, tmpimg2])
        assert_array_equal(concatenated.get_data()[..., 0], img1.get_data())
        assert_array_equal(concatenated.get_data()[..., 1], img3.get_data())
    finally:
        _remove_if_exists(tmpimg1)
        _remove_if_exists(tmpimg2)

    # 5D input is never valid, regardless of options.
    img5d = Nifti1Image(np.ones((2, 2, 2, 2, 2)), affine)
    assert_raises_regex(
        TypeError, 'Concatenated images must be 3D or 4D. '
        'You gave a list of 5D images', _utils.concat_niimgs,
        [img5d, img5d])
def test_concat_niimgs():
    """Dimensionality checks, auto-resampling and 5D rejection."""
    # create images different in affine and 3D/4D shape
    shape = (10, 11, 12)
    affine = np.eye(4)
    img1 = Nifti1Image(np.ones(shape), affine)
    img2 = Nifti1Image(np.ones(shape), 2 * affine)
    img3 = Nifti1Image(np.zeros(shape), affine)
    img4d = Nifti1Image(np.ones(shape + (2, )), affine)

    shape2 = (12, 11, 10)
    img1b = Nifti1Image(np.ones(shape2), affine)

    shape3 = (11, 22, 33)
    img1c = Nifti1Image(np.ones(shape3), affine)

    # Regression test for #601. Dimensionality of first image was not checked
    # properly.
    # FIX: raw strings — '\(' in an ordinary string literal is an invalid
    # escape sequence (SyntaxWarning on recent Python); the message is used
    # as a regex, so raw strings are the correct form.
    _dimension_error_msg = (r"Input data has incompatible dimensionality: "
                            r"Expected dimension is 4D and you provided "
                            r"a list of 4D images \(5D\)")
    assert_raises_regex(DimensionError, _dimension_error_msg,
                        _utils.concat_niimgs, [img4d], ensure_ndim=4)

    # check basic concatenation with equal shape/affine
    concatenated = _utils.concat_niimgs((img1, img3, img1))

    assert_raises_regex(DimensionError, _dimension_error_msg,
                        _utils.concat_niimgs, [img1, img4d])

    # smoke-test auto_resample
    concatenated = _utils.concat_niimgs((img1, img1b, img1c),
                                        auto_resample=True)
    assert_true(concatenated.shape == img1.shape + (3, ))

    # check error for non-forced but necessary resampling
    assert_raises_regex(ValueError, 'Field of view of image',
                        _utils.concat_niimgs, [img1, img2],
                        auto_resample=False)

    # test list of 4D niimgs as input
    tmpimg1 = tempfile.mktemp(suffix='.nii')
    tmpimg2 = tempfile.mktemp(suffix='.nii')
    try:
        nibabel.save(img1, tmpimg1)
        nibabel.save(img3, tmpimg2)
        concatenated = _utils.concat_niimgs([tmpimg1, tmpimg2])
        assert_array_equal(
            concatenated.get_data()[..., 0], img1.get_data())
        assert_array_equal(
            concatenated.get_data()[..., 1], img3.get_data())
    finally:
        _remove_if_exists(tmpimg1)
        _remove_if_exists(tmpimg2)

    # 5D input is never valid.
    img5d = Nifti1Image(np.ones((2, 2, 2, 2, 2)), affine)
    assert_raises_regex(TypeError,
                        'Concatenated images must be 3D or 4D. '
                        'You gave a list of 5D images',
                        _utils.concat_niimgs, [img5d, img5d])
def test_concat_niimg_dtype():
    """Default output dtype is float32; dtype=None preserves the input's."""
    base_shape = [2, 3, 4]
    # Two int16 volumes with different numbers of scans.
    vols = []
    for n_scans in (1, 5):
        data = np.zeros(base_shape + [n_scans]).astype(np.int16)
        vols.append(nibabel.Nifti1Image(data, np.eye(4)))

    nimg = _utils.concat_niimgs(vols)
    assert_equal(nimg.get_data().dtype, np.float32)

    nimg = _utils.concat_niimgs(vols, dtype=None)
    assert_equal(nimg.get_data().dtype, np.int16)
def test_concat_niimg_dtype():
    """concat_niimgs casts to float32 by default; dtype=None keeps int16."""
    shape = [2, 3, 4]
    vols = []
    for n_scans in [1, 5]:
        arr = np.zeros(shape + [n_scans]).astype(np.int16)
        vols.append(nibabel.Nifti1Image(arr, np.eye(4)))

    out = _utils.concat_niimgs(vols)
    assert get_data(out).dtype == np.float32

    out = _utils.concat_niimgs(vols, dtype=None)
    assert get_data(out).dtype == np.int16
def test_concat_niimgs():
    """accept_4d handling, auto-resampling and file-path input."""
    # create images different in affine and 3D/4D shape
    shape = (10, 11, 12)
    affine = np.eye(4)
    img1 = Nifti1Image(np.ones(shape), affine)
    img2 = Nifti1Image(np.ones(shape), 2 * affine)
    img3 = Nifti1Image(np.zeros(shape), affine)
    img4d = Nifti1Image(np.ones(shape + (2, )), affine)

    shape2 = (12, 11, 10)
    img1b = Nifti1Image(np.ones(shape2), affine)

    # BUG FIX: img1c was previously built from shape2, leaving shape3
    # unused, so the auto_resample smoke test only exercised two distinct
    # shapes instead of three (all sibling versions of this test use
    # shape3 here).
    shape3 = (11, 22, 33)
    img1c = Nifti1Image(np.ones(shape3), affine)

    # check basic concatenation with equal shape/affine
    concatenated = _utils.concat_niimgs((img1, img3, img1),
                                        accept_4d=False)
    concatenate_true = np.ones(shape + (3, ))

    # Smoke-test the accept_4d
    assert_raises_regexp(ValueError, 'image',
                         _utils.concat_niimgs, [img1, img4d])
    concatenated = _utils.concat_niimgs([img1, img4d], accept_4d=True)
    np.testing.assert_equal(concatenated.get_data(), concatenate_true,
                            verbose=0)

    # smoke-test auto_resample
    concatenated = _utils.concat_niimgs((img1, img1b, img1c),
                                        accept_4d=False,
                                        auto_resample=True)
    assert_true(concatenated.shape == img1.shape + (3, ))

    # check error for non-forced but necessary resampling
    assert_raises_regexp(ValueError, 'different from reference affine',
                         _utils.concat_niimgs, [img1, img2],
                         accept_4d=False)

    # Smoke-test the 4d parsing
    concatenated = _utils.concat_niimgs([img1, img4d], accept_4d=True)
    assert_equal(concatenated.shape[3], 3)

    # test list of 4D niimgs as input
    _, tmpimg1 = tempfile.mkstemp(suffix='.nii')
    _, tmpimg2 = tempfile.mkstemp(suffix='.nii')
    try:
        nibabel.save(img1, tmpimg1)
        nibabel.save(img3, tmpimg2)
        concatenated = _utils.concat_niimgs([tmpimg1, tmpimg2],
                                            accept_4d=False)
        assert_array_equal(concatenated.get_data()[..., 0], img1.get_data())
        assert_array_equal(concatenated.get_data()[..., 1], img3.get_data())
    finally:
        _remove_if_exists(tmpimg1)
        _remove_if_exists(tmpimg2)
def test_concat_niimgs():
    """accept_4d handling, auto-resampling and file-path input."""
    # create images different in affine and 3D/4D shape
    shape = (10, 11, 12)
    affine = np.eye(4)
    img1 = Nifti1Image(np.ones(shape), affine)
    img2 = Nifti1Image(np.ones(shape), 2 * affine)
    img3 = Nifti1Image(np.zeros(shape), affine)
    img4d = Nifti1Image(np.ones(shape + (2, )), affine)

    shape2 = (12, 11, 10)
    img1b = Nifti1Image(np.ones(shape2), affine)

    # BUG FIX: img1c previously reused shape2 (shape3 was dead code), so
    # the auto_resample smoke test concatenated two identically shaped
    # images instead of three distinct shapes.
    shape3 = (11, 22, 33)
    img1c = Nifti1Image(np.ones(shape3), affine)

    # check basic concatenation with equal shape/affine
    concatenated = _utils.concat_niimgs((img1, img3, img1),
                                        accept_4d=False)
    concatenate_true = np.ones(shape + (3,))

    # Smoke-test the accept_4d
    assert_raises_regexp(ValueError, 'image',
                         _utils.concat_niimgs, [img1, img4d])
    concatenated = _utils.concat_niimgs([img1, img4d], accept_4d=True)
    np.testing.assert_equal(concatenated.get_data(), concatenate_true,
                            verbose=0)

    # smoke-test auto_resample
    concatenated = _utils.concat_niimgs((img1, img1b, img1c),
                                        accept_4d=False,
                                        auto_resample=True)
    assert_true(concatenated.shape == img1.shape + (3, ))

    # check error for non-forced but necessary resampling
    assert_raises_regexp(ValueError, 'different from reference affine',
                         _utils.concat_niimgs, [img1, img2],
                         accept_4d=False)

    # Smoke-test the 4d parsing
    concatenated = _utils.concat_niimgs([img1, img4d], accept_4d=True)
    assert_equal(concatenated.shape[3], 3)

    # test list of 4D niimgs as input
    _, tmpimg1 = tempfile.mkstemp(suffix='.nii')
    _, tmpimg2 = tempfile.mkstemp(suffix='.nii')
    try:
        nibabel.save(img1, tmpimg1)
        nibabel.save(img3, tmpimg2)
        concatenated = _utils.concat_niimgs([tmpimg1, tmpimg2],
                                            accept_4d=False)
        assert_array_equal(
            concatenated.get_data()[..., 0], img1.get_data())
        assert_array_equal(
            concatenated.get_data()[..., 1], img3.get_data())
    finally:
        _remove_if_exists(tmpimg1)
        _remove_if_exists(tmpimg2)
def test_concat_niimgs():
    """Dimensionality errors (#601 regression), resampling and file input."""
    shape = (10, 11, 12)
    affine = np.eye(4)
    ones_3d = Nifti1Image(np.ones(shape), affine)
    other_affine = Nifti1Image(np.ones(shape), 2 * affine)
    zeros_3d = Nifti1Image(np.zeros(shape), affine)
    ones_4d = Nifti1Image(np.ones(shape + (2, )), affine)
    other_shape_a = Nifti1Image(np.ones((12, 11, 10)), affine)
    other_shape_b = Nifti1Image(np.ones((11, 22, 33)), affine)

    # Regression test for #601. Dimensionality of first image was not checked
    # properly
    ndim_msg = ('Data must be a 4D Niimg-like object but '
                'you provided')
    assert_raises_regex(DimensionError, ndim_msg,
                        _utils.concat_niimgs, [ones_4d], ensure_ndim=4)

    # check basic concatenation with equal shape/affine
    concatenated = _utils.concat_niimgs((ones_3d, zeros_3d, ones_3d))

    assert_raises_regex(DimensionError, ndim_msg,
                        _utils.concat_niimgs, [ones_3d, ones_4d])

    # smoke-test auto_resample
    concatenated = _utils.concat_niimgs(
        (ones_3d, other_shape_a, other_shape_b), auto_resample=True)
    assert_true(concatenated.shape == ones_3d.shape + (3, ))

    # check error for non-forced but necessary resampling
    assert_raises_regex(ValueError, 'Field of view of image',
                        _utils.concat_niimgs, [ones_3d, other_affine],
                        auto_resample=False)

    # test list of 4D niimgs as input
    _, path_a = tempfile.mkstemp(suffix='.nii')
    _, path_b = tempfile.mkstemp(suffix='.nii')
    try:
        nibabel.save(ones_3d, path_a)
        nibabel.save(zeros_3d, path_b)
        concatenated = _utils.concat_niimgs([path_a, path_b])
        assert_array_equal(concatenated.get_data()[..., 0],
                           ones_3d.get_data())
        assert_array_equal(concatenated.get_data()[..., 1],
                           zeros_3d.get_data())
    finally:
        _remove_if_exists(path_a)
        _remove_if_exists(path_b)
def test_concat_niimgs():
    """Dimension errors, auto-resampling and file-name input."""
    shape = (10, 11, 12)
    affine = np.eye(4)
    img1 = Nifti1Image(np.ones(shape), affine)
    img2 = Nifti1Image(np.ones(shape), 2 * affine)
    img3 = Nifti1Image(np.zeros(shape), affine)
    img4d = Nifti1Image(np.ones(shape + (2, )), affine)
    img1b = Nifti1Image(np.ones((12, 11, 10)), affine)
    img1c = Nifti1Image(np.ones((11, 22, 33)), affine)

    # Regression test for #601. Dimensionality of first image was not checked
    # properly
    expected_msg = ('Data must be a 3D Niimg-like object but '
                    'you provided an image of shape')
    assert_raises_regex(TypeError, expected_msg,
                        _utils.concat_niimgs, [img4d], ensure_ndim=4)

    # check basic concatenation with equal shape/affine
    concatenated = _utils.concat_niimgs((img1, img3, img1))
    assert_raises_regex(TypeError, expected_msg,
                        _utils.concat_niimgs, [img1, img4d])

    # smoke-test auto_resample
    concatenated = _utils.concat_niimgs((img1, img1b, img1c),
                                        auto_resample=True)
    assert_true(concatenated.shape == img1.shape + (3, ))

    # check error for non-forced but necessary resampling
    assert_raises_regex(ValueError, 'Field of view of image',
                        _utils.concat_niimgs, [img1, img2],
                        auto_resample=False)

    # test list of 4D niimgs as input
    _, tmpimg1 = tempfile.mkstemp(suffix='.nii')
    _, tmpimg2 = tempfile.mkstemp(suffix='.nii')
    try:
        nibabel.save(img1, tmpimg1)
        nibabel.save(img3, tmpimg2)
        concatenated = _utils.concat_niimgs([tmpimg1, tmpimg2])
        assert_array_equal(concatenated.get_data()[..., 0],
                           img1.get_data())
        assert_array_equal(concatenated.get_data()[..., 1],
                           img3.get_data())
    finally:
        _remove_if_exists(tmpimg1)
        _remove_if_exists(tmpimg2)
def test_iterator_generator():
    """concat_niimgs must handle lists, iterators and generators."""
    # Create a list of random images.
    # FIX: local renamed from the ambiguous single-letter `l` (PEP 8 /
    # flake8 E741 — easily confused with `1` and `I`).
    imgs = [Nifti1Image(np.random.random((10, 10, 10)), np.eye(4))
            for _ in range(10)]
    cc = _utils.concat_niimgs(imgs)
    assert_equal(cc.shape[-1], 10)
    assert_array_almost_equal(cc.get_data()[..., 0], imgs[0].get_data())

    # Same with iteration
    i = image.iter_img(imgs)
    cc = _utils.concat_niimgs(i)
    assert_equal(cc.shape[-1], 10)
    assert_array_almost_equal(cc.get_data()[..., 0], imgs[0].get_data())

    # Now, a generator
    b = []
    g = nifti_generator(b)
    cc = _utils.concat_niimgs(g)
    assert_equal(cc.shape[-1], 10)
    assert_equal(len(b), 10)
def test_iterator_generator():
    """Accept list, iterator and generator inputs to concat_niimgs."""
    # Ten random 3D images with identity affines.
    images = [Nifti1Image(np.random.random((10, 10, 10)), np.eye(4))
              for _ in range(10)]

    combined = _utils.concat_niimgs(images)
    assert_equal(combined.shape[-1], 10)
    assert_array_almost_equal(combined.get_data()[..., 0],
                              images[0].get_data())

    # Same with iteration
    iterator = image.iter_img(images)
    combined = _utils.concat_niimgs(iterator)
    assert_equal(combined.shape[-1], 10)
    assert_array_almost_equal(combined.get_data()[..., 0],
                              images[0].get_data())

    # Now, a generator
    accumulator = []
    combined = _utils.concat_niimgs(nifti_generator(accumulator))
    assert_equal(combined.shape[-1], 10)
    assert_equal(len(accumulator), 10)
def test_csf_wm_extracted_signal():
    """Compare CSF/WM signals extracted per run against stored references.

    Reads module-level globals: `params` (layout/config dict), `base_dir`,
    and `arielle_runs` (per-run reference arrays), plus the `osp`, `gb`,
    `suf` and `ucr` module aliases — presumably bound at file level; TODO
    confirm against the full file.
    """
    dlayo = params['layout']
    nb_run = params['data']['nb_run']
    # Only the first subject / first session is checked here.
    sub, sess = 1,1
    runs_dir = osp.join(base_dir, dlayo['dir']['sub+'].format(sub),
                        dlayo['dir']['sess+'].format(sess),
                        dlayo['dir']['runs'])
    csf_dir = osp.join(runs_dir, dlayo['csf']['dir'])
    wm_dir = osp.join(runs_dir, dlayo['wm']['dir'])
    #print(csf_dir, ' + pat ', dlayo['csf']['roi_mask'])
    # Locate the CSF ROI mask; _check_glob_res enforces exactly one match.
    csf_file = gb.glob(osp.join(csf_dir, dlayo['csf']['roi_mask']))
    if not csf_file:
        print("glob empty: {} {}".format(csf_dir, dlayo['csf']['roi_mask']))
    csf_file = suf._check_glob_res(csf_file, ensure=1, files_only=True)
    # Same lookup for the WM ROI mask.
    wm_file = gb.glob(osp.join(wm_dir, dlayo['wm']['roi_mask']))
    if not wm_file:
        print("glob empty: {} {}".format(wm_dir, dlayo['wm']['roi_mask']))
    wm_file = suf._check_glob_res(wm_file, ensure=1, files_only=True)
    # Collect smoothed functional files for every run and sort each run's
    # file list so volume order is deterministic.
    dir_smooth_imgs = osp.join(runs_dir, dlayo['dir']['smooth'])
    pat_imgs_files = dlayo['pat']['sub+sess+run+']+"*.nii*"
    runs_pat = [pat_imgs_files.format(sub, sess, run_idx) \
                for run_idx in range(1, nb_run+1)]
    runs = [gb.glob(osp.join(dir_smooth_imgs, pat)) for pat in runs_pat]
    for run in runs:
        run.sort()
    #print("csf_file : ", csf_file)
    #print("csf_dir : ", csf_dir)
    #print("dir_smooth_imgs : ", dir_smooth_imgs)
    #for k in arielle_runs[0].keys():
    #    print(k)
    for idx, run in enumerate(runs):
        # Stack this run's 3D volumes into a single 4D image.
        run_4d = concat_niimgs(run, ensure_ndim=4)
        #--- get CSF
        # check_lengh=196: expected number of volumes per run — assumed
        # from the call site; TODO confirm (note the API's spelling).
        csf_arr, csf_labs = ucr.extract_roi_run(csf_dir, csf_file, run_4d,
                                                standardize=False,
                                                check_lengh=196,
                                                verbose=False)
        csf_mat_arr = arielle_runs[idx]['csf_map_0.93_erode']
        assert_allclose(csf_mat_arr, csf_arr)
        #print(csf_labs)
        #--- get WM
        wm_arr, wm_labs = ucr.extract_roi_run(wm_dir, wm_file, run_4d,
                                              standardize=False,
                                              check_lengh=196,
                                              verbose=False)
        wm_mat_arr = arielle_runs[idx]['wm_map_0.99_erode']
        assert_allclose(wm_mat_arr, wm_arr)
def do_one_run(run_curr, sess_curr, sub_curr, params, verbose=False):
    """Extract ROI signals for one run, regress out confounds, and save.

    Parameters
    ----------
    run_curr : dict
        Run-level info; keys used: 'file_names', 'run_idx', 'motion'.
    sess_curr : dict
        Session-level info; keys used: 'sess_idx', 'dsig', 'mask', 'droi',
        'roi_prefix', and the wm/csf/mask confound directories/filenames.
    sub_curr : dict
        Subject-level info (not read here beyond symmetry with callers).
    params : dict
        Global parameters; 'data', 'analysis' and 'layout' sections are read.
    verbose : bool, optional
        If True, print progress information.

    Returns
    -------
    dict
        Raw and confound-filtered signal arrays plus labels and the
        extraction diagnostics (`issues`, `info`).
    """
    run_info = {}
    nvol = params['data']['nb_vol']
    dt = params['data']['TR']
    nb_run = params['data']['nb_run']
    file_names = run_curr['file_names']
    run_idx = run_curr['run_idx']
    run_idx0 = run_idx - 1
    assert run_idx0 >= 0
    assert run_idx0 < nb_run
    #sub_idx = sub_curr['sub_idx']
    sess_idx = sess_curr['sess_idx']
    mvt_cond = run_curr['motion']
    dsig = sess_curr['dsig']
    mask = sess_curr['mask']
    low_freq = params['analysis']['filter']['low_freq']
    high_freq = params['analysis']['filter']['high_freq']

    # signal file names
    #-------------------
    _fn_sig = params['layout']['out']['signals']['signals+']
    # _fn_fsig = params['layout']['out']['signals']['f_signals+']
    fn_sig = osp.join(dsig, _fn_sig.format(sess_idx, run_idx) + mvt_cond)
    # fn_fsig = osp.join(dsig, _fn_fsig.format(run_idx)+mvt_cond)

    # extract signals and save them in preproc/roi_signals
    #-----------------------------------------------------
    min_vox_roi = params['analysis']['min_vox_in_roi']
    run_4d = concat_niimgs(file_names, ensure_ndim=4)
    signals, _issues, _info = ucr.extract_signals(
        sess_curr['droi'], sess_curr['roi_prefix'], run_4d,
        mask=mask, minvox=min_vox_roi)

    # construct matrix of counfounds
    #-----------------------------------------------------
    arr_counf = []
    labs_counf = []
    #--- get WM
    if params['analysis']['apply_wm']:
        wm_arr, wm_labs = ucr.extract_roi_run(
            sess_curr['wm_dir'], sess_curr['wm_filename'],
            run_4d, check_lengh=nvol, verbose=verbose)
        labs_counf = labs_counf + wm_labs
        arr_counf.append(wm_arr)
        if verbose:
            print("applying wm \n")
    else:
        wm_arr, wm_labs = None, None
    #--- get CSF
    if params['analysis']['apply_csf']:
        csf_arr, csf_labs = ucr.extract_roi_run(
            sess_curr['csf_dir'], sess_curr['csf_filename'],
            run_4d, check_lengh=nvol, verbose=verbose)
        labs_counf = labs_counf + csf_labs
        arr_counf.append(csf_arr)
        if verbose:
            print("applying csf \n")
    else:
        csf_arr, csf_labs = None, None
    #--- get GR (global signal over the whole-brain mask)
    if params['analysis']['apply_global_reg']:
        gr_arr, gr_labs = ucr.extract_roi_run(
            sess_curr['mask_dir'], sess_curr['mask_filename'],
            run_4d, check_lengh=nvol, verbose=verbose)
        labs_counf = labs_counf + gr_labs
        arr_counf.append(gr_arr)
        if verbose:
            print("applying GR \n")
    else:
        gr_arr, gr_labs = None, None
    #--- get MVT (motion parameters for this run)
    if params['analysis']['apply_mvt']:
        mvt_arr, mvt_labs = ucr.extract_mvt(sess_curr['mvtfile'], run_idx0,
                                            nvol, verbose=verbose)
        labs_counf = labs_counf + mvt_labs
        arr_counf.append(mvt_arr)
        if verbose:
            print("applying mvt \n")
    else:
        mvt_arr, mvt_labs = None, None
    #--- get cosine functions (band-pass filter basis)
    if params['analysis']['apply_filter']:
        bf_arr, bf_labs = ucr.extract_bf(low_freq, high_freq, nvol, dt,
                                         verbose=verbose)
        labs_counf = labs_counf + bf_labs
        arr_counf.append(bf_arr)
        if verbose:
            print("applying filter \n")
    else:
        bf_arr, bf_labs = None, None

    #--- put it together
    # BUG FIX: some_counfounds was previously assigned only inside the
    # `if arr_counf:` branch, raising NameError at the `if some_counfounds:`
    # test below whenever no confound option was enabled.
    some_counfounds = False
    if arr_counf:
        some_counfounds = True
        arr_counf = np.hstack(tuple(arr_counf))
        if verbose:
            print(arr_counf.shape)
            print(labs_counf[:17])

    # filter and save
    #-----------------------------------------------------
    arr_sig, labels_sig = ucr._dict_signals_to_arr(signals)
    if some_counfounds:
        # Project the signals onto the orthogonal complement of the
        # confound subspace.
        arr_sig_f = ucr.R_proj(arr_counf, arr_sig)
    else:
        arr_sig_f = arr_sig

    run_info = dict(arr_sig=arr_sig, labels_sig=labels_sig, issues=_issues,
                    info=_info, arr_sig_f=arr_sig_f, arr_counf=arr_counf,
                    labs_counf=labs_counf)

    # save filtered signals
    save_is_true = params['analysis']['write_signals']
    if save_is_true:
        np.savez(fn_sig, arr_sig=arr_sig, labels_sig=labels_sig,
                 issues=_issues, info=_info, arr_sig_f=arr_sig_f,
                 arr_counf=arr_counf, labs_counf=labs_counf, params=params)
    #np.savez(fn_fsig, arr_sig_f=arr_sig_f, arr_counf=arr_counf, labs_counf=labs_counf)
    return run_info
def convert(subject_id, henson_base_dir, output_base_dir, run_ids=None, resample_if_necessary=False): if run_ids is None: run_ids = range(1, 10) henson_subject_dir = path(henson_base_dir) / ("Sub%02d" % subject_id) openfmri_subject_dir = path(output_base_dir) / ("sub%03d" % subject_id) # Take all fmri volumes and concatenate them into one henson_bold_dir = henson_subject_dir / "BOLD" openfmri_bold_dir = openfmri_subject_dir / "BOLD" for run_id in run_ids: print "run id %d" % run_id henson_run_dir = henson_bold_dir / ("Run_%02d" % run_id) openfmri_run_dir = openfmri_bold_dir / "task001_run001" if not openfmri_run_dir.exists(): openfmri_run_dir.makedirs() henson_run_files = sorted(henson_run_dir.glob( "fMR09029-00??-00???-000???-01.nii")) if subject_id == 14 and run_id == 2: # File fMR09029-0004-00010-000010-01.nii is broken henson_run_files.remove(henson_run_dir / "fMR09029-0004-00010-000010-01.nii") try: # This only works if affines, shape, etc are equal # it is actually not the case as I just saw concatenated = concat_niimgs(henson_run_files) except ValueError: ref_affine = nb.load(henson_run_files[0]).get_affine() niimgs = [nb.load(hrf) for hrf in henson_run_files] for i, niimg in enumerate(niimgs): aff = niimg.get_affine() if np.abs( np.linalg.norm( np.linalg.inv(ref_affine).dot(aff) - np.eye(4), 2)) > 1e-6: warnings.warn("File %s has a significantly different affine %s from %s" % (hrf.basename(), str(aff), str(ref_affine))) if resample_if_necessary: niimgs[i] = resample_img(niimg, target_affine=ref_affine, target_shape=niimg.shape) else: tmp = niimg.get_affine() niimg._affine = ref_affine concatenated = concat_niimgs(niimgs) nb.save(concatenated, openfmri_run_dir / "bold.nii.gz") keep_filenames_file = openfmri_run_dir / "original_files.json" json.dump([hrf.basename() for hrf in henson_run_files], open(keep_filenames_file, "w")) # Copy anat file henson_anat_file = henson_subject_dir / "T1" / "mprage.nii" openfmri_anat_file = (openfmri_subject_dir / "anatomy" / 
"highres001.nii.gz") if not openfmri_anat_file.dirname().exists(): openfmri_anat_file.dirname().makedirs() nb.save(nb.load(henson_anat_file), openfmri_anat_file)
def test_concat_niimgs():
    """pytest-based checks of concat_niimgs errors, resampling, glob input."""
    shape = (10, 11, 12)
    affine = np.eye(4)
    img_ones = Nifti1Image(np.ones(shape), affine)
    img_other_affine = Nifti1Image(np.ones(shape), 2 * affine)
    img_zeros = Nifti1Image(np.zeros(shape), affine)
    img_4d = Nifti1Image(np.ones(shape + (2, )), affine)
    img_shape_b = Nifti1Image(np.ones((12, 11, 10)), affine)
    img_shape_c = Nifti1Image(np.ones((11, 22, 33)), affine)

    # Regression test for #601. Dimensionality of first image was not checked
    # properly
    _dimension_error_msg = ("Input data has incompatible dimensionality: "
                            "Expected dimension is 4D and you provided "
                            "a list of 4D images \\(5D\\)")
    with pytest.raises(DimensionError, match=_dimension_error_msg):
        _utils.concat_niimgs([img_4d], ensure_ndim=4)

    # check basic concatenation with equal shape/affine
    concatenated = _utils.concat_niimgs((img_ones, img_zeros, img_ones))

    with pytest.raises(DimensionError, match=_dimension_error_msg):
        _utils.concat_niimgs([img_ones, img_4d])

    # smoke-test auto_resample
    concatenated = _utils.concat_niimgs(
        (img_ones, img_shape_b, img_shape_c), auto_resample=True)
    assert concatenated.shape == img_ones.shape + (3, )

    # check error for non-forced but necessary resampling
    with pytest.raises(ValueError, match='Field of view of image'):
        _utils.concat_niimgs([img_ones, img_other_affine],
                             auto_resample=False)

    # test list of 4D niimgs as input, passed as a glob pattern
    tempdir = tempfile.mkdtemp()
    tmpimg1 = os.path.join(tempdir, '1.nii')
    tmpimg2 = os.path.join(tempdir, '2.nii')
    try:
        nibabel.save(img_ones, tmpimg1)
        nibabel.save(img_zeros, tmpimg2)
        concatenated = _utils.concat_niimgs(os.path.join(tempdir, '*'))
        assert_array_equal(get_data(concatenated)[..., 0],
                           get_data(img_ones))
        assert_array_equal(get_data(concatenated)[..., 1],
                           get_data(img_zeros))
    finally:
        _remove_if_exists(tmpimg1)
        _remove_if_exists(tmpimg2)
        if os.path.exists(tempdir):
            os.removedirs(tempdir)

    # 5D input is never valid.
    img5d = Nifti1Image(np.ones((2, 2, 2, 2, 2)), affine)
    with pytest.raises(TypeError,
                       match='Concatenated images must be 3D or 4D. '
                             'You gave a list of 5D images'):
        _utils.concat_niimgs([img5d, img5d])